xref: /linux/drivers/gpu/drm/radeon/evergreen.c (revision 110e6f26af80dfd90b6e5c645b1aed7228aa580d)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37 
38 /*
39  * Indirect registers accessor
40  */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43 	unsigned long flags;
44 	u32 r;
45 
46 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48 	r = RREG32(EVERGREEN_CG_IND_DATA);
49 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50 	return r;
51 }
52 
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55 	unsigned long flags;
56 
57 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59 	WREG32(EVERGREEN_CG_IND_DATA, (v));
60 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62 
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65 	unsigned long flags;
66 	u32 r;
67 
68 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72 	return r;
73 }
74 
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77 	unsigned long flags;
78 
79 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84 
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87 	unsigned long flags;
88 	u32 r;
89 
90 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94 	return r;
95 }
96 
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99 	unsigned long flags;
100 
101 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106 
/*
 * Register-block offsets of the six evergreen display controllers
 * (CRTC0-CRTC5), indexed by crtc id.
 * NOTE(review): presumably added to per-CRTC register addresses by the
 * display code — confirm against users of crtc_offsets.
 */
107 static const u32 crtc_offsets[6] =
108 {
109 	EVERGREEN_CRTC0_REGISTER_OFFSET,
110 	EVERGREEN_CRTC1_REGISTER_OFFSET,
111 	EVERGREEN_CRTC2_REGISTER_OFFSET,
112 	EVERGREEN_CRTC3_REGISTER_OFFSET,
113 	EVERGREEN_CRTC4_REGISTER_OFFSET,
114 	EVERGREEN_CRTC5_REGISTER_OFFSET
115 };
116 
117 #include "clearstate_evergreen.h"
118 
/*
 * Raw GFX register offsets for the Sumo-family RLC save/restore list.
 * NOTE(review): appears to be consumed by the RLC setup code elsewhere in
 * the driver — the order and contents are hardware-defined, so do not
 * reorder or edit entries without the corresponding register spec.
 */
119 static const u32 sumo_rlc_save_restore_register_list[] =
120 {
121 	0x98fc,
122 	0x9830,
123 	0x9834,
124 	0x9838,
125 	0x9870,
126 	0x9874,
127 	0x8a14,
128 	0x8b24,
129 	0x8bcc,
130 	0x8b10,
131 	0x8d00,
132 	0x8d04,
133 	0x8c00,
134 	0x8c04,
135 	0x8c08,
136 	0x8c0c,
137 	0x8d8c,
138 	0x8c20,
139 	0x8c24,
140 	0x8c28,
141 	0x8c18,
142 	0x8c1c,
143 	0x8cf0,
144 	0x8e2c,
145 	0x8e38,
146 	0x8c30,
147 	0x9508,
148 	0x9688,
149 	0x9608,
150 	0x960c,
151 	0x9610,
152 	0x9614,
153 	0x88c4,
154 	0x88d4,
155 	0xa008,
156 	0x900c,
157 	0x9100,
158 	0x913c,
159 	0x98f8,
160 	0x98f4,
161 	0x9b7c,
162 	0x3f8c,
163 	0x8950,
164 	0x8954,
165 	0x8a18,
166 	0x8b28,
167 	0x9144,
168 	0x9148,
169 	0x914c,
170 	0x3f90,
171 	0x3f94,
172 	0x915c,
173 	0x9160,
174 	0x9178,
175 	0x917c,
176 	0x9180,
177 	0x918c,
178 	0x9190,
179 	0x9194,
180 	0x9198,
181 	0x919c,
182 	0x91a8,
183 	0x91ac,
184 	0x91b0,
185 	0x91b4,
186 	0x91b8,
187 	0x91c4,
188 	0x91c8,
189 	0x91cc,
190 	0x91d0,
191 	0x91d4,
192 	0x91e0,
193 	0x91e4,
194 	0x91ec,
195 	0x91f0,
196 	0x91f4,
197 	0x9200,
198 	0x9204,
199 	0x929c,
200 	0x9150,
201 	0x802c,
202 };
203 
/*
 * Forward declarations for helpers defined later in this file, plus
 * extern declarations of cross-ASIC helpers (cayman/cik/si/rv770)
 * implemented in their respective source files.
 */
204 static void evergreen_gpu_init(struct radeon_device *rdev);
205 void evergreen_fini(struct radeon_device *rdev);
206 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
207 void evergreen_program_aspm(struct radeon_device *rdev);
208 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
209 				     int ring, u32 cp_int_cntl);
210 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
211 				   u32 status, u32 addr);
212 void cik_init_cp_pg_table(struct radeon_device *rdev);
213 
214 extern u32 si_get_csb_size(struct radeon_device *rdev);
215 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216 extern u32 cik_get_csb_size(struct radeon_device *rdev);
217 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
218 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219 
/*
 * "Golden" register settings for Cypress/Hemlock, applied at init via
 * radeon_program_register_sequence() (see evergreen_init_golden_registers).
 * NOTE(review): each triple appears to be {register offset, mask, value} —
 * confirm against radeon_program_register_sequence(). Values are
 * hardware-validated; do not edit without the register spec.
 */
220 static const u32 evergreen_golden_registers[] =
221 {
222 	0x3f90, 0xffff0000, 0xff000000,
223 	0x9148, 0xffff0000, 0xff000000,
224 	0x3f94, 0xffff0000, 0xff000000,
225 	0x914c, 0xffff0000, 0xff000000,
226 	0x9b7c, 0xffffffff, 0x00000000,
227 	0x8a14, 0xffffffff, 0x00000007,
228 	0x8b10, 0xffffffff, 0x00000000,
229 	0x960c, 0xffffffff, 0x54763210,
230 	0x88c4, 0xffffffff, 0x000000c2,
231 	0x88d4, 0xffffffff, 0x00000010,
232 	0x8974, 0xffffffff, 0x00000000,
233 	0xc78, 0x00000080, 0x00000080,
234 	0x5eb4, 0xffffffff, 0x00000002,
235 	0x5e78, 0xffffffff, 0x001000f0,
236 	0x6104, 0x01000300, 0x00000000,
237 	0x5bc0, 0x00300000, 0x00000000,
238 	0x7030, 0xffffffff, 0x00000011,
239 	0x7c30, 0xffffffff, 0x00000011,
240 	0x10830, 0xffffffff, 0x00000011,
241 	0x11430, 0xffffffff, 0x00000011,
242 	0x12030, 0xffffffff, 0x00000011,
243 	0x12c30, 0xffffffff, 0x00000011,
244 	0xd02c, 0xffffffff, 0x08421000,
245 	0x240c, 0xffffffff, 0x00000380,
246 	0x8b24, 0xffffffff, 0x00ff0fff,
247 	0x28a4c, 0x06000000, 0x06000000,
248 	0x10c, 0x00000001, 0x00000001,
249 	0x8d00, 0xffffffff, 0x100e4848,
250 	0x8d04, 0xffffffff, 0x00164745,
251 	0x8c00, 0xffffffff, 0xe4000003,
252 	0x8c04, 0xffffffff, 0x40600060,
253 	0x8c08, 0xffffffff, 0x001c001c,
254 	0x8cf0, 0xffffffff, 0x08e00620,
255 	0x8c20, 0xffffffff, 0x00800080,
256 	0x8c24, 0xffffffff, 0x00800080,
257 	0x8c18, 0xffffffff, 0x20202078,
258 	0x8c1c, 0xffffffff, 0x00001010,
259 	0x28350, 0xffffffff, 0x00000000,
260 	0xa008, 0xffffffff, 0x00010000,
261 	0x5c4, 0xffffffff, 0x00000001,
262 	0x9508, 0xffffffff, 0x00000002,
263 	0x913c, 0x0000000f, 0x0000000a
264 };
265 
/*
 * Second golden-register table for Cypress/Hemlock (same triple format
 * as evergreen_golden_registers), also applied via
 * radeon_program_register_sequence() at init.
 */
266 static const u32 evergreen_golden_registers2[] =
267 {
268 	0x2f4c, 0xffffffff, 0x00000000,
269 	0x54f4, 0xffffffff, 0x00000000,
270 	0x54f0, 0xffffffff, 0x00000000,
271 	0x5498, 0xffffffff, 0x00000000,
272 	0x549c, 0xffffffff, 0x00000000,
273 	0x5494, 0xffffffff, 0x00000000,
274 	0x53cc, 0xffffffff, 0x00000000,
275 	0x53c8, 0xffffffff, 0x00000000,
276 	0x53c4, 0xffffffff, 0x00000000,
277 	0x53c0, 0xffffffff, 0x00000000,
278 	0x53bc, 0xffffffff, 0x00000000,
279 	0x53b8, 0xffffffff, 0x00000000,
280 	0x53b4, 0xffffffff, 0x00000000,
281 	0x53b0, 0xffffffff, 0x00000000
282 };
283 
/*
 * Medium-grain clock gating (MGCG) init sequence for Cypress, applied via
 * radeon_program_register_sequence() (see evergreen_init_golden_registers).
 * Same {offset, mask, value} triple layout as the golden-register tables.
 * Note the repeated writes to 0x802c interleaved in the sequence — the
 * ordering is deliberate; do not sort or deduplicate.
 */
284 static const u32 cypress_mgcg_init[] =
285 {
286 	0x802c, 0xffffffff, 0xc0000000,
287 	0x5448, 0xffffffff, 0x00000100,
288 	0x55e4, 0xffffffff, 0x00000100,
289 	0x160c, 0xffffffff, 0x00000100,
290 	0x5644, 0xffffffff, 0x00000100,
291 	0xc164, 0xffffffff, 0x00000100,
292 	0x8a18, 0xffffffff, 0x00000100,
293 	0x897c, 0xffffffff, 0x06000100,
294 	0x8b28, 0xffffffff, 0x00000100,
295 	0x9144, 0xffffffff, 0x00000100,
296 	0x9a60, 0xffffffff, 0x00000100,
297 	0x9868, 0xffffffff, 0x00000100,
298 	0x8d58, 0xffffffff, 0x00000100,
299 	0x9510, 0xffffffff, 0x00000100,
300 	0x949c, 0xffffffff, 0x00000100,
301 	0x9654, 0xffffffff, 0x00000100,
302 	0x9030, 0xffffffff, 0x00000100,
303 	0x9034, 0xffffffff, 0x00000100,
304 	0x9038, 0xffffffff, 0x00000100,
305 	0x903c, 0xffffffff, 0x00000100,
306 	0x9040, 0xffffffff, 0x00000100,
307 	0xa200, 0xffffffff, 0x00000100,
308 	0xa204, 0xffffffff, 0x00000100,
309 	0xa208, 0xffffffff, 0x00000100,
310 	0xa20c, 0xffffffff, 0x00000100,
311 	0x971c, 0xffffffff, 0x00000100,
312 	0x977c, 0xffffffff, 0x00000100,
313 	0x3f80, 0xffffffff, 0x00000100,
314 	0xa210, 0xffffffff, 0x00000100,
315 	0xa214, 0xffffffff, 0x00000100,
316 	0x4d8, 0xffffffff, 0x00000100,
317 	0x9784, 0xffffffff, 0x00000100,
318 	0x9698, 0xffffffff, 0x00000100,
319 	0x4d4, 0xffffffff, 0x00000200,
320 	0x30cc, 0xffffffff, 0x00000100,
321 	0xd0c0, 0xffffffff, 0xff000100,
322 	0x802c, 0xffffffff, 0x40000000,
323 	0x915c, 0xffffffff, 0x00010000,
324 	0x9160, 0xffffffff, 0x00030002,
325 	0x9178, 0xffffffff, 0x00070000,
326 	0x917c, 0xffffffff, 0x00030002,
327 	0x9180, 0xffffffff, 0x00050004,
328 	0x918c, 0xffffffff, 0x00010006,
329 	0x9190, 0xffffffff, 0x00090008,
330 	0x9194, 0xffffffff, 0x00070000,
331 	0x9198, 0xffffffff, 0x00030002,
332 	0x919c, 0xffffffff, 0x00050004,
333 	0x91a8, 0xffffffff, 0x00010006,
334 	0x91ac, 0xffffffff, 0x00090008,
335 	0x91b0, 0xffffffff, 0x00070000,
336 	0x91b4, 0xffffffff, 0x00030002,
337 	0x91b8, 0xffffffff, 0x00050004,
338 	0x91c4, 0xffffffff, 0x00010006,
339 	0x91c8, 0xffffffff, 0x00090008,
340 	0x91cc, 0xffffffff, 0x00070000,
341 	0x91d0, 0xffffffff, 0x00030002,
342 	0x91d4, 0xffffffff, 0x00050004,
343 	0x91e0, 0xffffffff, 0x00010006,
344 	0x91e4, 0xffffffff, 0x00090008,
345 	0x91e8, 0xffffffff, 0x00000000,
346 	0x91ec, 0xffffffff, 0x00070000,
347 	0x91f0, 0xffffffff, 0x00030002,
348 	0x91f4, 0xffffffff, 0x00050004,
349 	0x9200, 0xffffffff, 0x00010006,
350 	0x9204, 0xffffffff, 0x00090008,
351 	0x9208, 0xffffffff, 0x00070000,
352 	0x920c, 0xffffffff, 0x00030002,
353 	0x9210, 0xffffffff, 0x00050004,
354 	0x921c, 0xffffffff, 0x00010006,
355 	0x9220, 0xffffffff, 0x00090008,
356 	0x9224, 0xffffffff, 0x00070000,
357 	0x9228, 0xffffffff, 0x00030002,
358 	0x922c, 0xffffffff, 0x00050004,
359 	0x9238, 0xffffffff, 0x00010006,
360 	0x923c, 0xffffffff, 0x00090008,
361 	0x9240, 0xffffffff, 0x00070000,
362 	0x9244, 0xffffffff, 0x00030002,
363 	0x9248, 0xffffffff, 0x00050004,
364 	0x9254, 0xffffffff, 0x00010006,
365 	0x9258, 0xffffffff, 0x00090008,
366 	0x925c, 0xffffffff, 0x00070000,
367 	0x9260, 0xffffffff, 0x00030002,
368 	0x9264, 0xffffffff, 0x00050004,
369 	0x9270, 0xffffffff, 0x00010006,
370 	0x9274, 0xffffffff, 0x00090008,
371 	0x9278, 0xffffffff, 0x00070000,
372 	0x927c, 0xffffffff, 0x00030002,
373 	0x9280, 0xffffffff, 0x00050004,
374 	0x928c, 0xffffffff, 0x00010006,
375 	0x9290, 0xffffffff, 0x00090008,
376 	0x9294, 0xffffffff, 0x00000000,
377 	0x929c, 0xffffffff, 0x00000001,
378 	0x802c, 0xffffffff, 0x40010000,
379 	0x915c, 0xffffffff, 0x00010000,
380 	0x9160, 0xffffffff, 0x00030002,
381 	0x9178, 0xffffffff, 0x00070000,
382 	0x917c, 0xffffffff, 0x00030002,
383 	0x9180, 0xffffffff, 0x00050004,
384 	0x918c, 0xffffffff, 0x00010006,
385 	0x9190, 0xffffffff, 0x00090008,
386 	0x9194, 0xffffffff, 0x00070000,
387 	0x9198, 0xffffffff, 0x00030002,
388 	0x919c, 0xffffffff, 0x00050004,
389 	0x91a8, 0xffffffff, 0x00010006,
390 	0x91ac, 0xffffffff, 0x00090008,
391 	0x91b0, 0xffffffff, 0x00070000,
392 	0x91b4, 0xffffffff, 0x00030002,
393 	0x91b8, 0xffffffff, 0x00050004,
394 	0x91c4, 0xffffffff, 0x00010006,
395 	0x91c8, 0xffffffff, 0x00090008,
396 	0x91cc, 0xffffffff, 0x00070000,
397 	0x91d0, 0xffffffff, 0x00030002,
398 	0x91d4, 0xffffffff, 0x00050004,
399 	0x91e0, 0xffffffff, 0x00010006,
400 	0x91e4, 0xffffffff, 0x00090008,
401 	0x91e8, 0xffffffff, 0x00000000,
402 	0x91ec, 0xffffffff, 0x00070000,
403 	0x91f0, 0xffffffff, 0x00030002,
404 	0x91f4, 0xffffffff, 0x00050004,
405 	0x9200, 0xffffffff, 0x00010006,
406 	0x9204, 0xffffffff, 0x00090008,
407 	0x9208, 0xffffffff, 0x00070000,
408 	0x920c, 0xffffffff, 0x00030002,
409 	0x9210, 0xffffffff, 0x00050004,
410 	0x921c, 0xffffffff, 0x00010006,
411 	0x9220, 0xffffffff, 0x00090008,
412 	0x9224, 0xffffffff, 0x00070000,
413 	0x9228, 0xffffffff, 0x00030002,
414 	0x922c, 0xffffffff, 0x00050004,
415 	0x9238, 0xffffffff, 0x00010006,
416 	0x923c, 0xffffffff, 0x00090008,
417 	0x9240, 0xffffffff, 0x00070000,
418 	0x9244, 0xffffffff, 0x00030002,
419 	0x9248, 0xffffffff, 0x00050004,
420 	0x9254, 0xffffffff, 0x00010006,
421 	0x9258, 0xffffffff, 0x00090008,
422 	0x925c, 0xffffffff, 0x00070000,
423 	0x9260, 0xffffffff, 0x00030002,
424 	0x9264, 0xffffffff, 0x00050004,
425 	0x9270, 0xffffffff, 0x00010006,
426 	0x9274, 0xffffffff, 0x00090008,
427 	0x9278, 0xffffffff, 0x00070000,
428 	0x927c, 0xffffffff, 0x00030002,
429 	0x9280, 0xffffffff, 0x00050004,
430 	0x928c, 0xffffffff, 0x00010006,
431 	0x9290, 0xffffffff, 0x00090008,
432 	0x9294, 0xffffffff, 0x00000000,
433 	0x929c, 0xffffffff, 0x00000001,
434 	0x802c, 0xffffffff, 0xc0000000
435 };
436 
/*
 * Medium-grain clock gating (MGCG) init sequence for Redwood — same
 * {offset, mask, value} triple layout as cypress_mgcg_init, with a
 * shorter per-pipe section. Ordering (including repeated 0x802c writes)
 * is deliberate; do not reorder.
 */
437 static const u32 redwood_mgcg_init[] =
438 {
439 	0x802c, 0xffffffff, 0xc0000000,
440 	0x5448, 0xffffffff, 0x00000100,
441 	0x55e4, 0xffffffff, 0x00000100,
442 	0x160c, 0xffffffff, 0x00000100,
443 	0x5644, 0xffffffff, 0x00000100,
444 	0xc164, 0xffffffff, 0x00000100,
445 	0x8a18, 0xffffffff, 0x00000100,
446 	0x897c, 0xffffffff, 0x06000100,
447 	0x8b28, 0xffffffff, 0x00000100,
448 	0x9144, 0xffffffff, 0x00000100,
449 	0x9a60, 0xffffffff, 0x00000100,
450 	0x9868, 0xffffffff, 0x00000100,
451 	0x8d58, 0xffffffff, 0x00000100,
452 	0x9510, 0xffffffff, 0x00000100,
453 	0x949c, 0xffffffff, 0x00000100,
454 	0x9654, 0xffffffff, 0x00000100,
455 	0x9030, 0xffffffff, 0x00000100,
456 	0x9034, 0xffffffff, 0x00000100,
457 	0x9038, 0xffffffff, 0x00000100,
458 	0x903c, 0xffffffff, 0x00000100,
459 	0x9040, 0xffffffff, 0x00000100,
460 	0xa200, 0xffffffff, 0x00000100,
461 	0xa204, 0xffffffff, 0x00000100,
462 	0xa208, 0xffffffff, 0x00000100,
463 	0xa20c, 0xffffffff, 0x00000100,
464 	0x971c, 0xffffffff, 0x00000100,
465 	0x977c, 0xffffffff, 0x00000100,
466 	0x3f80, 0xffffffff, 0x00000100,
467 	0xa210, 0xffffffff, 0x00000100,
468 	0xa214, 0xffffffff, 0x00000100,
469 	0x4d8, 0xffffffff, 0x00000100,
470 	0x9784, 0xffffffff, 0x00000100,
471 	0x9698, 0xffffffff, 0x00000100,
472 	0x4d4, 0xffffffff, 0x00000200,
473 	0x30cc, 0xffffffff, 0x00000100,
474 	0xd0c0, 0xffffffff, 0xff000100,
475 	0x802c, 0xffffffff, 0x40000000,
476 	0x915c, 0xffffffff, 0x00010000,
477 	0x9160, 0xffffffff, 0x00030002,
478 	0x9178, 0xffffffff, 0x00070000,
479 	0x917c, 0xffffffff, 0x00030002,
480 	0x9180, 0xffffffff, 0x00050004,
481 	0x918c, 0xffffffff, 0x00010006,
482 	0x9190, 0xffffffff, 0x00090008,
483 	0x9194, 0xffffffff, 0x00070000,
484 	0x9198, 0xffffffff, 0x00030002,
485 	0x919c, 0xffffffff, 0x00050004,
486 	0x91a8, 0xffffffff, 0x00010006,
487 	0x91ac, 0xffffffff, 0x00090008,
488 	0x91b0, 0xffffffff, 0x00070000,
489 	0x91b4, 0xffffffff, 0x00030002,
490 	0x91b8, 0xffffffff, 0x00050004,
491 	0x91c4, 0xffffffff, 0x00010006,
492 	0x91c8, 0xffffffff, 0x00090008,
493 	0x91cc, 0xffffffff, 0x00070000,
494 	0x91d0, 0xffffffff, 0x00030002,
495 	0x91d4, 0xffffffff, 0x00050004,
496 	0x91e0, 0xffffffff, 0x00010006,
497 	0x91e4, 0xffffffff, 0x00090008,
498 	0x91e8, 0xffffffff, 0x00000000,
499 	0x91ec, 0xffffffff, 0x00070000,
500 	0x91f0, 0xffffffff, 0x00030002,
501 	0x91f4, 0xffffffff, 0x00050004,
502 	0x9200, 0xffffffff, 0x00010006,
503 	0x9204, 0xffffffff, 0x00090008,
504 	0x9294, 0xffffffff, 0x00000000,
505 	0x929c, 0xffffffff, 0x00000001,
506 	0x802c, 0xffffffff, 0xc0000000
507 };
508 
/*
 * Golden register settings for Cedar — same {offset, mask, value} triple
 * layout as evergreen_golden_registers, with Cedar-specific values
 * (e.g. 0x8cf0, 0x88d4) and fewer display-related entries.
 */
509 static const u32 cedar_golden_registers[] =
510 {
511 	0x3f90, 0xffff0000, 0xff000000,
512 	0x9148, 0xffff0000, 0xff000000,
513 	0x3f94, 0xffff0000, 0xff000000,
514 	0x914c, 0xffff0000, 0xff000000,
515 	0x9b7c, 0xffffffff, 0x00000000,
516 	0x8a14, 0xffffffff, 0x00000007,
517 	0x8b10, 0xffffffff, 0x00000000,
518 	0x960c, 0xffffffff, 0x54763210,
519 	0x88c4, 0xffffffff, 0x000000c2,
520 	0x88d4, 0xffffffff, 0x00000000,
521 	0x8974, 0xffffffff, 0x00000000,
522 	0xc78, 0x00000080, 0x00000080,
523 	0x5eb4, 0xffffffff, 0x00000002,
524 	0x5e78, 0xffffffff, 0x001000f0,
525 	0x6104, 0x01000300, 0x00000000,
526 	0x5bc0, 0x00300000, 0x00000000,
527 	0x7030, 0xffffffff, 0x00000011,
528 	0x7c30, 0xffffffff, 0x00000011,
529 	0x10830, 0xffffffff, 0x00000011,
530 	0x11430, 0xffffffff, 0x00000011,
531 	0xd02c, 0xffffffff, 0x08421000,
532 	0x240c, 0xffffffff, 0x00000380,
533 	0x8b24, 0xffffffff, 0x00ff0fff,
534 	0x28a4c, 0x06000000, 0x06000000,
535 	0x10c, 0x00000001, 0x00000001,
536 	0x8d00, 0xffffffff, 0x100e4848,
537 	0x8d04, 0xffffffff, 0x00164745,
538 	0x8c00, 0xffffffff, 0xe4000003,
539 	0x8c04, 0xffffffff, 0x40600060,
540 	0x8c08, 0xffffffff, 0x001c001c,
541 	0x8cf0, 0xffffffff, 0x08e00410,
542 	0x8c20, 0xffffffff, 0x00800080,
543 	0x8c24, 0xffffffff, 0x00800080,
544 	0x8c18, 0xffffffff, 0x20202078,
545 	0x8c1c, 0xffffffff, 0x00001010,
546 	0x28350, 0xffffffff, 0x00000000,
547 	0xa008, 0xffffffff, 0x00010000,
548 	0x5c4, 0xffffffff, 0x00000001,
549 	0x9508, 0xffffffff, 0x00000002
550 };
551 
/*
 * MGCG init sequence for Cedar — same {offset, mask, value} triple layout
 * as cypress_mgcg_init, with a much shorter per-pipe section (Cedar has
 * fewer pipes). Ordering is deliberate; do not reorder.
 */
552 static const u32 cedar_mgcg_init[] =
553 {
554 	0x802c, 0xffffffff, 0xc0000000,
555 	0x5448, 0xffffffff, 0x00000100,
556 	0x55e4, 0xffffffff, 0x00000100,
557 	0x160c, 0xffffffff, 0x00000100,
558 	0x5644, 0xffffffff, 0x00000100,
559 	0xc164, 0xffffffff, 0x00000100,
560 	0x8a18, 0xffffffff, 0x00000100,
561 	0x897c, 0xffffffff, 0x06000100,
562 	0x8b28, 0xffffffff, 0x00000100,
563 	0x9144, 0xffffffff, 0x00000100,
564 	0x9a60, 0xffffffff, 0x00000100,
565 	0x9868, 0xffffffff, 0x00000100,
566 	0x8d58, 0xffffffff, 0x00000100,
567 	0x9510, 0xffffffff, 0x00000100,
568 	0x949c, 0xffffffff, 0x00000100,
569 	0x9654, 0xffffffff, 0x00000100,
570 	0x9030, 0xffffffff, 0x00000100,
571 	0x9034, 0xffffffff, 0x00000100,
572 	0x9038, 0xffffffff, 0x00000100,
573 	0x903c, 0xffffffff, 0x00000100,
574 	0x9040, 0xffffffff, 0x00000100,
575 	0xa200, 0xffffffff, 0x00000100,
576 	0xa204, 0xffffffff, 0x00000100,
577 	0xa208, 0xffffffff, 0x00000100,
578 	0xa20c, 0xffffffff, 0x00000100,
579 	0x971c, 0xffffffff, 0x00000100,
580 	0x977c, 0xffffffff, 0x00000100,
581 	0x3f80, 0xffffffff, 0x00000100,
582 	0xa210, 0xffffffff, 0x00000100,
583 	0xa214, 0xffffffff, 0x00000100,
584 	0x4d8, 0xffffffff, 0x00000100,
585 	0x9784, 0xffffffff, 0x00000100,
586 	0x9698, 0xffffffff, 0x00000100,
587 	0x4d4, 0xffffffff, 0x00000200,
588 	0x30cc, 0xffffffff, 0x00000100,
589 	0xd0c0, 0xffffffff, 0xff000100,
590 	0x802c, 0xffffffff, 0x40000000,
591 	0x915c, 0xffffffff, 0x00010000,
592 	0x9178, 0xffffffff, 0x00050000,
593 	0x917c, 0xffffffff, 0x00030002,
594 	0x918c, 0xffffffff, 0x00010004,
595 	0x9190, 0xffffffff, 0x00070006,
596 	0x9194, 0xffffffff, 0x00050000,
597 	0x9198, 0xffffffff, 0x00030002,
598 	0x91a8, 0xffffffff, 0x00010004,
599 	0x91ac, 0xffffffff, 0x00070006,
600 	0x91e8, 0xffffffff, 0x00000000,
601 	0x9294, 0xffffffff, 0x00000000,
602 	0x929c, 0xffffffff, 0x00000001,
603 	0x802c, 0xffffffff, 0xc0000000
604 };
605 
/*
 * MGCG init sequence for Juniper — same {offset, mask, value} triple
 * layout as cypress_mgcg_init. Note the trailing block of 0x00000100
 * enables after the main sequence; ordering is deliberate.
 */
606 static const u32 juniper_mgcg_init[] =
607 {
608 	0x802c, 0xffffffff, 0xc0000000,
609 	0x5448, 0xffffffff, 0x00000100,
610 	0x55e4, 0xffffffff, 0x00000100,
611 	0x160c, 0xffffffff, 0x00000100,
612 	0x5644, 0xffffffff, 0x00000100,
613 	0xc164, 0xffffffff, 0x00000100,
614 	0x8a18, 0xffffffff, 0x00000100,
615 	0x897c, 0xffffffff, 0x06000100,
616 	0x8b28, 0xffffffff, 0x00000100,
617 	0x9144, 0xffffffff, 0x00000100,
618 	0x9a60, 0xffffffff, 0x00000100,
619 	0x9868, 0xffffffff, 0x00000100,
620 	0x8d58, 0xffffffff, 0x00000100,
621 	0x9510, 0xffffffff, 0x00000100,
622 	0x949c, 0xffffffff, 0x00000100,
623 	0x9654, 0xffffffff, 0x00000100,
624 	0x9030, 0xffffffff, 0x00000100,
625 	0x9034, 0xffffffff, 0x00000100,
626 	0x9038, 0xffffffff, 0x00000100,
627 	0x903c, 0xffffffff, 0x00000100,
628 	0x9040, 0xffffffff, 0x00000100,
629 	0xa200, 0xffffffff, 0x00000100,
630 	0xa204, 0xffffffff, 0x00000100,
631 	0xa208, 0xffffffff, 0x00000100,
632 	0xa20c, 0xffffffff, 0x00000100,
633 	0x971c, 0xffffffff, 0x00000100,
634 	0xd0c0, 0xffffffff, 0xff000100,
635 	0x802c, 0xffffffff, 0x40000000,
636 	0x915c, 0xffffffff, 0x00010000,
637 	0x9160, 0xffffffff, 0x00030002,
638 	0x9178, 0xffffffff, 0x00070000,
639 	0x917c, 0xffffffff, 0x00030002,
640 	0x9180, 0xffffffff, 0x00050004,
641 	0x918c, 0xffffffff, 0x00010006,
642 	0x9190, 0xffffffff, 0x00090008,
643 	0x9194, 0xffffffff, 0x00070000,
644 	0x9198, 0xffffffff, 0x00030002,
645 	0x919c, 0xffffffff, 0x00050004,
646 	0x91a8, 0xffffffff, 0x00010006,
647 	0x91ac, 0xffffffff, 0x00090008,
648 	0x91b0, 0xffffffff, 0x00070000,
649 	0x91b4, 0xffffffff, 0x00030002,
650 	0x91b8, 0xffffffff, 0x00050004,
651 	0x91c4, 0xffffffff, 0x00010006,
652 	0x91c8, 0xffffffff, 0x00090008,
653 	0x91cc, 0xffffffff, 0x00070000,
654 	0x91d0, 0xffffffff, 0x00030002,
655 	0x91d4, 0xffffffff, 0x00050004,
656 	0x91e0, 0xffffffff, 0x00010006,
657 	0x91e4, 0xffffffff, 0x00090008,
658 	0x91e8, 0xffffffff, 0x00000000,
659 	0x91ec, 0xffffffff, 0x00070000,
660 	0x91f0, 0xffffffff, 0x00030002,
661 	0x91f4, 0xffffffff, 0x00050004,
662 	0x9200, 0xffffffff, 0x00010006,
663 	0x9204, 0xffffffff, 0x00090008,
664 	0x9208, 0xffffffff, 0x00070000,
665 	0x920c, 0xffffffff, 0x00030002,
666 	0x9210, 0xffffffff, 0x00050004,
667 	0x921c, 0xffffffff, 0x00010006,
668 	0x9220, 0xffffffff, 0x00090008,
669 	0x9224, 0xffffffff, 0x00070000,
670 	0x9228, 0xffffffff, 0x00030002,
671 	0x922c, 0xffffffff, 0x00050004,
672 	0x9238, 0xffffffff, 0x00010006,
673 	0x923c, 0xffffffff, 0x00090008,
674 	0x9240, 0xffffffff, 0x00070000,
675 	0x9244, 0xffffffff, 0x00030002,
676 	0x9248, 0xffffffff, 0x00050004,
677 	0x9254, 0xffffffff, 0x00010006,
678 	0x9258, 0xffffffff, 0x00090008,
679 	0x925c, 0xffffffff, 0x00070000,
680 	0x9260, 0xffffffff, 0x00030002,
681 	0x9264, 0xffffffff, 0x00050004,
682 	0x9270, 0xffffffff, 0x00010006,
683 	0x9274, 0xffffffff, 0x00090008,
684 	0x9278, 0xffffffff, 0x00070000,
685 	0x927c, 0xffffffff, 0x00030002,
686 	0x9280, 0xffffffff, 0x00050004,
687 	0x928c, 0xffffffff, 0x00010006,
688 	0x9290, 0xffffffff, 0x00090008,
689 	0x9294, 0xffffffff, 0x00000000,
690 	0x929c, 0xffffffff, 0x00000001,
691 	0x802c, 0xffffffff, 0xc0000000,
692 	0x977c, 0xffffffff, 0x00000100,
693 	0x3f80, 0xffffffff, 0x00000100,
694 	0xa210, 0xffffffff, 0x00000100,
695 	0xa214, 0xffffffff, 0x00000100,
696 	0x4d8, 0xffffffff, 0x00000100,
697 	0x9784, 0xffffffff, 0x00000100,
698 	0x9698, 0xffffffff, 0x00000100,
699 	0x4d4, 0xffffffff, 0x00000200,
700 	0x30cc, 0xffffffff, 0x00000100,
701 	0x802c, 0xffffffff, 0xc0000000
702 };
703 
/*
 * Golden register settings for SuperSumo APUs — same {offset, mask, value}
 * triple layout as the other golden-register tables.
 */
704 static const u32 supersumo_golden_registers[] =
705 {
706 	0x5eb4, 0xffffffff, 0x00000002,
707 	0x5c4, 0xffffffff, 0x00000001,
708 	0x7030, 0xffffffff, 0x00000011,
709 	0x7c30, 0xffffffff, 0x00000011,
710 	0x6104, 0x01000300, 0x00000000,
711 	0x5bc0, 0x00300000, 0x00000000,
712 	0x8c04, 0xffffffff, 0x40600060,
713 	0x8c08, 0xffffffff, 0x001c001c,
714 	0x8c20, 0xffffffff, 0x00800080,
715 	0x8c24, 0xffffffff, 0x00800080,
716 	0x8c18, 0xffffffff, 0x20202078,
717 	0x8c1c, 0xffffffff, 0x00001010,
718 	0x918c, 0xffffffff, 0x00010006,
719 	0x91a8, 0xffffffff, 0x00010006,
720 	0x91c4, 0xffffffff, 0x00010006,
721 	0x91e0, 0xffffffff, 0x00010006,
722 	0x9200, 0xffffffff, 0x00010006,
723 	0x9150, 0xffffffff, 0x6e944040,
724 	0x917c, 0xffffffff, 0x00030002,
725 	0x9180, 0xffffffff, 0x00050004,
726 	0x9198, 0xffffffff, 0x00030002,
727 	0x919c, 0xffffffff, 0x00050004,
728 	0x91b4, 0xffffffff, 0x00030002,
729 	0x91b8, 0xffffffff, 0x00050004,
730 	0x91d0, 0xffffffff, 0x00030002,
731 	0x91d4, 0xffffffff, 0x00050004,
732 	0x91f0, 0xffffffff, 0x00030002,
733 	0x91f4, 0xffffffff, 0x00050004,
734 	0x915c, 0xffffffff, 0x00010000,
735 	0x9160, 0xffffffff, 0x00030002,
736 	0x3f90, 0xffff0000, 0xff000000,
737 	0x9178, 0xffffffff, 0x00070000,
738 	0x9194, 0xffffffff, 0x00070000,
739 	0x91b0, 0xffffffff, 0x00070000,
740 	0x91cc, 0xffffffff, 0x00070000,
741 	0x91ec, 0xffffffff, 0x00070000,
742 	0x9148, 0xffff0000, 0xff000000,
743 	0x9190, 0xffffffff, 0x00090008,
744 	0x91ac, 0xffffffff, 0x00090008,
745 	0x91c8, 0xffffffff, 0x00090008,
746 	0x91e4, 0xffffffff, 0x00090008,
747 	0x9204, 0xffffffff, 0x00090008,
748 	0x3f94, 0xffff0000, 0xff000000,
749 	0x914c, 0xffff0000, 0xff000000,
750 	0x929c, 0xffffffff, 0x00000001,
751 	0x8a18, 0xffffffff, 0x00000100,
752 	0x8b28, 0xffffffff, 0x00000100,
753 	0x9144, 0xffffffff, 0x00000100,
754 	0x5644, 0xffffffff, 0x00000100,
755 	0x9b7c, 0xffffffff, 0x00000000,
756 	0x8030, 0xffffffff, 0x0000100a,
757 	0x8a14, 0xffffffff, 0x00000007,
758 	0x8b24, 0xffffffff, 0x00ff0fff,
759 	0x8b10, 0xffffffff, 0x00000000,
760 	0x28a4c, 0x06000000, 0x06000000,
761 	0x4d8, 0xffffffff, 0x00000100,
762 	0x913c, 0xffff000f, 0x0100000a,
763 	0x960c, 0xffffffff, 0x54763210,
764 	0x88c4, 0xffffffff, 0x000000c2,
765 	0x88d4, 0xffffffff, 0x00000010,
766 	0x8974, 0xffffffff, 0x00000000,
767 	0xc78, 0x00000080, 0x00000080,
768 	0x5e78, 0xffffffff, 0x001000f0,
769 	0xd02c, 0xffffffff, 0x08421000,
770 	0xa008, 0xffffffff, 0x00010000,
771 	0x8d00, 0xffffffff, 0x100e4848,
772 	0x8d04, 0xffffffff, 0x00164745,
773 	0x8c00, 0xffffffff, 0xe4000003,
774 	0x8cf0, 0x1fffffff, 0x08e00620,
775 	0x28350, 0xffffffff, 0x00000000,
776 	0x9508, 0xffffffff, 0x00000002
777 };
778 
/*
 * Additional golden register settings applied on Sumo on top of the
 * supersumo table — same {offset, mask, value} triple layout.
 */
779 static const u32 sumo_golden_registers[] =
780 {
781 	0x900c, 0x00ffffff, 0x0017071f,
782 	0x8c18, 0xffffffff, 0x10101060,
783 	0x8c1c, 0xffffffff, 0x00001010,
784 	0x8c30, 0x0000000f, 0x00000005,
785 	0x9688, 0x0000000f, 0x00000007
786 };
787 
/*
 * Golden register settings for Wrestler (Palm) APUs — same
 * {offset, mask, value} triple layout as the other golden tables.
 */
788 static const u32 wrestler_golden_registers[] =
789 {
790 	0x5eb4, 0xffffffff, 0x00000002,
791 	0x5c4, 0xffffffff, 0x00000001,
792 	0x7030, 0xffffffff, 0x00000011,
793 	0x7c30, 0xffffffff, 0x00000011,
794 	0x6104, 0x01000300, 0x00000000,
795 	0x5bc0, 0x00300000, 0x00000000,
796 	0x918c, 0xffffffff, 0x00010006,
797 	0x91a8, 0xffffffff, 0x00010006,
798 	0x9150, 0xffffffff, 0x6e944040,
799 	0x917c, 0xffffffff, 0x00030002,
800 	0x9198, 0xffffffff, 0x00030002,
801 	0x915c, 0xffffffff, 0x00010000,
802 	0x3f90, 0xffff0000, 0xff000000,
803 	0x9178, 0xffffffff, 0x00070000,
804 	0x9194, 0xffffffff, 0x00070000,
805 	0x9148, 0xffff0000, 0xff000000,
806 	0x9190, 0xffffffff, 0x00090008,
807 	0x91ac, 0xffffffff, 0x00090008,
808 	0x3f94, 0xffff0000, 0xff000000,
809 	0x914c, 0xffff0000, 0xff000000,
810 	0x929c, 0xffffffff, 0x00000001,
811 	0x8a18, 0xffffffff, 0x00000100,
812 	0x8b28, 0xffffffff, 0x00000100,
813 	0x9144, 0xffffffff, 0x00000100,
814 	0x9b7c, 0xffffffff, 0x00000000,
815 	0x8030, 0xffffffff, 0x0000100a,
816 	0x8a14, 0xffffffff, 0x00000001,
817 	0x8b24, 0xffffffff, 0x00ff0fff,
818 	0x8b10, 0xffffffff, 0x00000000,
819 	0x28a4c, 0x06000000, 0x06000000,
820 	0x4d8, 0xffffffff, 0x00000100,
821 	0x913c, 0xffff000f, 0x0100000a,
822 	0x960c, 0xffffffff, 0x54763210,
823 	0x88c4, 0xffffffff, 0x000000c2,
824 	0x88d4, 0xffffffff, 0x00000010,
825 	0x8974, 0xffffffff, 0x00000000,
826 	0xc78, 0x00000080, 0x00000080,
827 	0x5e78, 0xffffffff, 0x001000f0,
828 	0xd02c, 0xffffffff, 0x08421000,
829 	0xa008, 0xffffffff, 0x00010000,
830 	0x8d00, 0xffffffff, 0x100e4848,
831 	0x8d04, 0xffffffff, 0x00164745,
832 	0x8c00, 0xffffffff, 0xe4000003,
833 	0x8cf0, 0x1fffffff, 0x08e00410,
834 	0x28350, 0xffffffff, 0x00000000,
835 	0x9508, 0xffffffff, 0x00000002,
836 	0x900c, 0xffffffff, 0x0017071f,
837 	0x8c18, 0xffffffff, 0x10101060,
838 	0x8c1c, 0xffffffff, 0x00001010
839 };
840 
/*
 * Golden register settings for Barts (Northern Islands) — same
 * {offset, mask, value} triple layout; note the partial (non-0xffffffff)
 * masks, which preserve unrelated bits in the target registers.
 */
841 static const u32 barts_golden_registers[] =
842 {
843 	0x5eb4, 0xffffffff, 0x00000002,
844 	0x5e78, 0x8f311ff1, 0x001000f0,
845 	0x3f90, 0xffff0000, 0xff000000,
846 	0x9148, 0xffff0000, 0xff000000,
847 	0x3f94, 0xffff0000, 0xff000000,
848 	0x914c, 0xffff0000, 0xff000000,
849 	0xc78, 0x00000080, 0x00000080,
850 	0xbd4, 0x70073777, 0x00010001,
851 	0xd02c, 0xbfffff1f, 0x08421000,
852 	0xd0b8, 0x03773777, 0x02011003,
853 	0x5bc0, 0x00200000, 0x50100000,
854 	0x98f8, 0x33773777, 0x02011003,
855 	0x98fc, 0xffffffff, 0x76543210,
856 	0x7030, 0x31000311, 0x00000011,
857 	0x2f48, 0x00000007, 0x02011003,
858 	0x6b28, 0x00000010, 0x00000012,
859 	0x7728, 0x00000010, 0x00000012,
860 	0x10328, 0x00000010, 0x00000012,
861 	0x10f28, 0x00000010, 0x00000012,
862 	0x11b28, 0x00000010, 0x00000012,
863 	0x12728, 0x00000010, 0x00000012,
864 	0x240c, 0x000007ff, 0x00000380,
865 	0x8a14, 0xf000001f, 0x00000007,
866 	0x8b24, 0x3fff3fff, 0x00ff0fff,
867 	0x8b10, 0x0000ff0f, 0x00000000,
868 	0x28a4c, 0x07ffffff, 0x06000000,
869 	0x10c, 0x00000001, 0x00010003,
870 	0xa02c, 0xffffffff, 0x0000009b,
871 	0x913c, 0x0000000f, 0x0100000a,
872 	0x8d00, 0xffff7f7f, 0x100e4848,
873 	0x8d04, 0x00ffffff, 0x00164745,
874 	0x8c00, 0xfffc0003, 0xe4000003,
875 	0x8c04, 0xf8ff00ff, 0x40600060,
876 	0x8c08, 0x00ff00ff, 0x001c001c,
877 	0x8cf0, 0x1fff1fff, 0x08e00620,
878 	0x8c20, 0x0fff0fff, 0x00800080,
879 	0x8c24, 0x0fff0fff, 0x00800080,
880 	0x8c18, 0xffffffff, 0x20202078,
881 	0x8c1c, 0x0000ffff, 0x00001010,
882 	0x28350, 0x00000f01, 0x00000000,
883 	0x9508, 0x3700001f, 0x00000002,
884 	0x960c, 0xffffffff, 0x54763210,
885 	0x88c4, 0x001f3ae3, 0x000000c2,
886 	0x88d4, 0x0000001f, 0x00000010,
887 	0x8974, 0xffffffff, 0x00000000
888 };
889 
/*
 * Golden register settings for Turks (Northern Islands) — same
 * {offset, mask, value} triple layout as barts_golden_registers,
 * with Turks-specific masks and values.
 */
890 static const u32 turks_golden_registers[] =
891 {
892 	0x5eb4, 0xffffffff, 0x00000002,
893 	0x5e78, 0x8f311ff1, 0x001000f0,
894 	0x8c8, 0x00003000, 0x00001070,
895 	0x8cc, 0x000fffff, 0x00040035,
896 	0x3f90, 0xffff0000, 0xfff00000,
897 	0x9148, 0xffff0000, 0xfff00000,
898 	0x3f94, 0xffff0000, 0xfff00000,
899 	0x914c, 0xffff0000, 0xfff00000,
900 	0xc78, 0x00000080, 0x00000080,
901 	0xbd4, 0x00073007, 0x00010002,
902 	0xd02c, 0xbfffff1f, 0x08421000,
903 	0xd0b8, 0x03773777, 0x02010002,
904 	0x5bc0, 0x00200000, 0x50100000,
905 	0x98f8, 0x33773777, 0x00010002,
906 	0x98fc, 0xffffffff, 0x33221100,
907 	0x7030, 0x31000311, 0x00000011,
908 	0x2f48, 0x33773777, 0x00010002,
909 	0x6b28, 0x00000010, 0x00000012,
910 	0x7728, 0x00000010, 0x00000012,
911 	0x10328, 0x00000010, 0x00000012,
912 	0x10f28, 0x00000010, 0x00000012,
913 	0x11b28, 0x00000010, 0x00000012,
914 	0x12728, 0x00000010, 0x00000012,
915 	0x240c, 0x000007ff, 0x00000380,
916 	0x8a14, 0xf000001f, 0x00000007,
917 	0x8b24, 0x3fff3fff, 0x00ff0fff,
918 	0x8b10, 0x0000ff0f, 0x00000000,
919 	0x28a4c, 0x07ffffff, 0x06000000,
920 	0x10c, 0x00000001, 0x00010003,
921 	0xa02c, 0xffffffff, 0x0000009b,
922 	0x913c, 0x0000000f, 0x0100000a,
923 	0x8d00, 0xffff7f7f, 0x100e4848,
924 	0x8d04, 0x00ffffff, 0x00164745,
925 	0x8c00, 0xfffc0003, 0xe4000003,
926 	0x8c04, 0xf8ff00ff, 0x40600060,
927 	0x8c08, 0x00ff00ff, 0x001c001c,
928 	0x8cf0, 0x1fff1fff, 0x08e00410,
929 	0x8c20, 0x0fff0fff, 0x00800080,
930 	0x8c24, 0x0fff0fff, 0x00800080,
931 	0x8c18, 0xffffffff, 0x20202078,
932 	0x8c1c, 0x0000ffff, 0x00001010,
933 	0x28350, 0x00000f01, 0x00000000,
934 	0x9508, 0x3700001f, 0x00000002,
935 	0x960c, 0xffffffff, 0x54763210,
936 	0x88c4, 0x001f3ae3, 0x000000c2,
937 	0x88d4, 0x0000001f, 0x00000010,
938 	0x8974, 0xffffffff, 0x00000000
939 };
940 
/*
 * Golden register settings for Caicos (Northern Islands) — same
 * {offset, mask, value} triple layout as the Barts/Turks tables,
 * with Caicos-specific masks and values.
 */
941 static const u32 caicos_golden_registers[] =
942 {
943 	0x5eb4, 0xffffffff, 0x00000002,
944 	0x5e78, 0x8f311ff1, 0x001000f0,
945 	0x8c8, 0x00003420, 0x00001450,
946 	0x8cc, 0x000fffff, 0x00040035,
947 	0x3f90, 0xffff0000, 0xfffc0000,
948 	0x9148, 0xffff0000, 0xfffc0000,
949 	0x3f94, 0xffff0000, 0xfffc0000,
950 	0x914c, 0xffff0000, 0xfffc0000,
951 	0xc78, 0x00000080, 0x00000080,
952 	0xbd4, 0x00073007, 0x00010001,
953 	0xd02c, 0xbfffff1f, 0x08421000,
954 	0xd0b8, 0x03773777, 0x02010001,
955 	0x5bc0, 0x00200000, 0x50100000,
956 	0x98f8, 0x33773777, 0x02010001,
957 	0x98fc, 0xffffffff, 0x33221100,
958 	0x7030, 0x31000311, 0x00000011,
959 	0x2f48, 0x33773777, 0x02010001,
960 	0x6b28, 0x00000010, 0x00000012,
961 	0x7728, 0x00000010, 0x00000012,
962 	0x10328, 0x00000010, 0x00000012,
963 	0x10f28, 0x00000010, 0x00000012,
964 	0x11b28, 0x00000010, 0x00000012,
965 	0x12728, 0x00000010, 0x00000012,
966 	0x240c, 0x000007ff, 0x00000380,
967 	0x8a14, 0xf000001f, 0x00000001,
968 	0x8b24, 0x3fff3fff, 0x00ff0fff,
969 	0x8b10, 0x0000ff0f, 0x00000000,
970 	0x28a4c, 0x07ffffff, 0x06000000,
971 	0x10c, 0x00000001, 0x00010003,
972 	0xa02c, 0xffffffff, 0x0000009b,
973 	0x913c, 0x0000000f, 0x0100000a,
974 	0x8d00, 0xffff7f7f, 0x100e4848,
975 	0x8d04, 0x00ffffff, 0x00164745,
976 	0x8c00, 0xfffc0003, 0xe4000003,
977 	0x8c04, 0xf8ff00ff, 0x40600060,
978 	0x8c08, 0x00ff00ff, 0x001c001c,
979 	0x8cf0, 0x1fff1fff, 0x08e00410,
980 	0x8c20, 0x0fff0fff, 0x00800080,
981 	0x8c24, 0x0fff0fff, 0x00800080,
982 	0x8c18, 0xffffffff, 0x20202078,
983 	0x8c1c, 0x0000ffff, 0x00001010,
984 	0x28350, 0x00000f01, 0x00000000,
985 	0x9508, 0x3700001f, 0x00000002,
986 	0x960c, 0xffffffff, 0x54763210,
987 	0x88c4, 0x001f3ae3, 0x000000c2,
988 	0x88d4, 0x0000001f, 0x00000010,
989 	0x8974, 0xffffffff, 0x00000000
990 };
991 
992 static void evergreen_init_golden_registers(struct radeon_device *rdev)
993 {
994 	switch (rdev->family) {
995 	case CHIP_CYPRESS:
996 	case CHIP_HEMLOCK:
997 		radeon_program_register_sequence(rdev,
998 						 evergreen_golden_registers,
999 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1000 		radeon_program_register_sequence(rdev,
1001 						 evergreen_golden_registers2,
1002 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1003 		radeon_program_register_sequence(rdev,
1004 						 cypress_mgcg_init,
1005 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
1006 		break;
1007 	case CHIP_JUNIPER:
1008 		radeon_program_register_sequence(rdev,
1009 						 evergreen_golden_registers,
1010 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1011 		radeon_program_register_sequence(rdev,
1012 						 evergreen_golden_registers2,
1013 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1014 		radeon_program_register_sequence(rdev,
1015 						 juniper_mgcg_init,
1016 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
1017 		break;
1018 	case CHIP_REDWOOD:
1019 		radeon_program_register_sequence(rdev,
1020 						 evergreen_golden_registers,
1021 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1022 		radeon_program_register_sequence(rdev,
1023 						 evergreen_golden_registers2,
1024 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1025 		radeon_program_register_sequence(rdev,
1026 						 redwood_mgcg_init,
1027 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1028 		break;
1029 	case CHIP_CEDAR:
1030 		radeon_program_register_sequence(rdev,
1031 						 cedar_golden_registers,
1032 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
1033 		radeon_program_register_sequence(rdev,
1034 						 evergreen_golden_registers2,
1035 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1036 		radeon_program_register_sequence(rdev,
1037 						 cedar_mgcg_init,
1038 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1039 		break;
1040 	case CHIP_PALM:
1041 		radeon_program_register_sequence(rdev,
1042 						 wrestler_golden_registers,
1043 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1044 		break;
1045 	case CHIP_SUMO:
1046 		radeon_program_register_sequence(rdev,
1047 						 supersumo_golden_registers,
1048 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1049 		break;
1050 	case CHIP_SUMO2:
1051 		radeon_program_register_sequence(rdev,
1052 						 supersumo_golden_registers,
1053 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1054 		radeon_program_register_sequence(rdev,
1055 						 sumo_golden_registers,
1056 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
1057 		break;
1058 	case CHIP_BARTS:
1059 		radeon_program_register_sequence(rdev,
1060 						 barts_golden_registers,
1061 						 (const u32)ARRAY_SIZE(barts_golden_registers));
1062 		break;
1063 	case CHIP_TURKS:
1064 		radeon_program_register_sequence(rdev,
1065 						 turks_golden_registers,
1066 						 (const u32)ARRAY_SIZE(turks_golden_registers));
1067 		break;
1068 	case CHIP_CAICOS:
1069 		radeon_program_register_sequence(rdev,
1070 						 caicos_golden_registers,
1071 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1072 		break;
1073 	default:
1074 		break;
1075 	}
1076 }
1077 
1078 /**
1079  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080  *
1081  * @rdev: radeon_device pointer
1082  * @reg: register offset in bytes
1083  * @val: register value
1084  *
1085  * Returns 0 for success or -EINVAL for an invalid register
1086  *
1087  */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089 					u32 reg, u32 *val)
1090 {
1091 	switch (reg) {
1092 	case GRBM_STATUS:
1093 	case GRBM_STATUS_SE0:
1094 	case GRBM_STATUS_SE1:
1095 	case SRBM_STATUS:
1096 	case SRBM_STATUS2:
1097 	case DMA_STATUS_REG:
1098 	case UVD_STATUS:
1099 		*val = RREG32(reg);
1100 		return 0;
1101 	default:
1102 		return -EINVAL;
1103 	}
1104 }
1105 
1106 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1107 			     unsigned *bankh, unsigned *mtaspect,
1108 			     unsigned *tile_split)
1109 {
1110 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1111 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1112 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1113 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1114 	switch (*bankw) {
1115 	default:
1116 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1117 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1118 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1119 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1120 	}
1121 	switch (*bankh) {
1122 	default:
1123 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1124 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1125 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1126 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1127 	}
1128 	switch (*mtaspect) {
1129 	default:
1130 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1131 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1132 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1133 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1134 	}
1135 }
1136 
/**
 * sumo_set_uvd_clock - program one UVD clock (VCLK or DCLK) and wait for it
 *
 * @rdev: radeon_device pointer
 * @clock: target frequency (10 kHz units, per the /100 -> MHz conversion
 *         in sumo_set_uvd_clocks())
 * @cntl_reg: control register taking the post divider
 * @status_reg: status register polled for DCLK_STATUS
 *
 * Asks the atom interpreter for dividers matching @clock, programs only
 * the divider field of @cntl_reg, then polls up to 100 * 10ms for the
 * status bit.  Returns 0 on success, -ETIMEDOUT if the status bit never
 * asserts, or the error from radeon_atom_get_clock_dividers().
 */
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	/* masked write: only the divider bits change, other control bits stay */
	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}
1160 
/**
 * sumo_set_uvd_clocks - set the UVD video and decoder clocks (sumo family)
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (10 kHz units)
 * @dclk: requested decoder clock (10 kHz units)
 *
 * Programs both clocks and records the resulting MHz values in CG_SCRATCH1
 * (vclk in the low 16 bits, dclk in the high 16 bits).  On failure the
 * scratch register is still updated with whatever was successfully set.
 * Returns 0 on success or the error from sumo_set_uvd_clock().
 */
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	/* low 16 bits record vclk in MHz */
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* Mhz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	/* high 16 bits record dclk in MHz */
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* Mhz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}
1183 
/**
 * evergreen_set_uvd_clocks - reprogram the UPLL for the UVD clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock; 0 leaves the PLL asleep in bypass mode
 * @dclk: requested decoder clock; 0 likewise
 *
 * Switches VCLK/DCLK onto the bypass clock, reprograms the UPLL dividers
 * for the requested frequencies, waits for the PLL to lock, then switches
 * the clocks back to the PLL.  The exact ordering and delays below follow
 * the required hardware programming sequence — do not reorder.
 * Returns 0 on success or a negative error code.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb and post dividers for the requested vclk/dclk pair */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects a PLL range based on the feedback divider */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272 
/**
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIe max read request size
 *
 * @rdev: radeon_device pointer
 *
 * Clamps the PCIe MAX_READ_REQUEST_SIZE to 512 bytes when the value left
 * by the BIOS or OS is one the hardware does not handle well.
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	/* encode bytes as the PCIe MRRS field: 128 -> 0, 256 -> 1, ... 4096 -> 5 */
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}
1286 
/**
 * dce4_program_fmt - program the FMT (pixel formatter) block for an encoder
 *
 * @encoder: drm encoder to configure
 *
 * Sets up truncation or dithering in FMT_BIT_DEPTH_CONTROL based on the
 * monitor's bpc and the connector's dither preference (evergreen+).
 * LVDS/eDP (handled by atom) and analog DACs are skipped.
 */
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	/* bpc of 0 means the monitor bpc is unknown; leave the FMT alone */
	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
1342 
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346 		return true;
1347 	else
1348 		return false;
1349 }
1350 
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353 	u32 pos1, pos2;
1354 
1355 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357 
1358 	if (pos1 != pos2)
1359 		return true;
1360 	else
1361 		return false;
1362 }
1363 
1364 /**
1365  * dce4_wait_for_vblank - vblank wait asic callback.
1366  *
1367  * @rdev: radeon_device pointer
1368  * @crtc: crtc to wait for vblank on
1369  *
1370  * Wait for vblank on the requested crtc (evergreen+).
1371  */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	/* nothing to wait for if the crtc is not scanning out */
	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			/* bail out if the position counter has stalled,
			 * otherwise this loop would never terminate
			 */
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	/* now wait for the next vblank to actually begin */
	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}
1399 
1400 /**
1401  * evergreen_page_flip - pageflip callback.
1402  *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: flip on the next horizontal retrace instead of waiting for vblank
1406  *
1407  * Triggers the actual pageflip by updating the primary
1408  * surface base address (evergreen+).
1409  */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
			 bool async)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* update the scanout addresses */
	/* async flips latch on horizontal retrace rather than vblank */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}
1425 
1426 /**
1427  * evergreen_page_flip_pending - check if page flip is still pending
1428  *
1429  * @rdev: radeon_device pointer
1430  * @crtc_id: crtc to check
1431  *
1432  * Returns the current update pending status.
1433  */
1434 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1435 {
1436 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1437 
1438 	/* Return current update_pending status: */
1439 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1440 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1441 }
1442 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper combines a raw ADC reading with a fused offset
		 * from CG_THERMAL_CTRL.
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset is 9-bit two's complement: bit 8 set means negative */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* NOTE(review): bits 10/9 apparently flag out-of-range
		 * readings, which are clamped to the extremes below.
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			/* sign-extend the 9-bit value */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* value is in 0.5 C units; convert to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1481 
1482 int sumo_get_temp(struct radeon_device *rdev)
1483 {
1484 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1485 	int actual_temp = temp - 49;
1486 
1487 	return actual_temp * 1000;
1488 }
1489 
1490 /**
1491  * sumo_pm_init_profile - Initialize power profiles callback.
1492  *
1493  * @rdev: radeon_device pointer
1494  *
1495  * Initialize the power states used in profile mode
1496  * (sumo, trinity, SI).
1497  * Used for profile mode only.
1498  */
1499 void sumo_pm_init_profile(struct radeon_device *rdev)
1500 {
1501 	int idx;
1502 
1503 	/* default */
1504 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1508 
1509 	/* low,mid sh/mh */
1510 	if (rdev->flags & RADEON_IS_MOBILITY)
1511 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1512 	else
1513 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1514 
1515 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1516 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1517 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1518 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1519 
1520 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1521 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1522 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1523 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1524 
1525 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1526 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1527 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1528 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1529 
1530 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1531 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1532 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1533 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1534 
1535 	/* high sh/mh */
1536 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1537 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1538 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1539 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1540 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1541 		rdev->pm.power_state[idx].num_clock_modes - 1;
1542 
1543 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1544 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1545 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1546 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1547 		rdev->pm.power_state[idx].num_clock_modes - 1;
1548 }
1549 
1550 /**
1551  * btc_pm_init_profile - Initialize power profiles callback.
1552  *
1553  * @rdev: radeon_device pointer
1554  *
1555  * Initialize the power states used in profile mode
1556  * (BTC, cayman).
1557  * Used for profile mode only.
1558  */
1559 void btc_pm_init_profile(struct radeon_device *rdev)
1560 {
1561 	int idx;
1562 
1563 	/* default */
1564 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1565 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1566 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1567 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1568 	/* starting with BTC, there is one state that is used for both
1569 	 * MH and SH.  Difference is that we always use the high clock index for
1570 	 * mclk.
1571 	 */
1572 	if (rdev->flags & RADEON_IS_MOBILITY)
1573 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1574 	else
1575 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1576 	/* low sh */
1577 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1578 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1580 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1581 	/* mid sh */
1582 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1583 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1584 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1585 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1586 	/* high sh */
1587 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1588 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1589 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1590 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1591 	/* low mh */
1592 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1593 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1594 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1595 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1596 	/* mid mh */
1597 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1598 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1599 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1600 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1601 	/* high mh */
1602 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1603 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1604 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1605 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1606 }
1607 
1608 /**
1609  * evergreen_pm_misc - set additional pm hw parameters callback.
1610  *
1611  * @rdev: radeon_device pointer
1612  *
1613  * Set non-clock parameters associated with a power state
1614  * (voltage, etc.) (evergreen+).
1615  */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when the target differs */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1655 
1656 /**
1657  * evergreen_pm_prepare - pre-power state change callback.
1658  *
1659  * @rdev: radeon_device pointer
1660  *
1661  * Prepare for a power state change (evergreen+).
1662  */
1663 void evergreen_pm_prepare(struct radeon_device *rdev)
1664 {
1665 	struct drm_device *ddev = rdev->ddev;
1666 	struct drm_crtc *crtc;
1667 	struct radeon_crtc *radeon_crtc;
1668 	u32 tmp;
1669 
1670 	/* disable any active CRTCs */
1671 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1672 		radeon_crtc = to_radeon_crtc(crtc);
1673 		if (radeon_crtc->enabled) {
1674 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1675 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1676 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1677 		}
1678 	}
1679 }
1680 
1681 /**
1682  * evergreen_pm_finish - post-power state change callback.
1683  *
1684  * @rdev: radeon_device pointer
1685  *
1686  * Clean up after a power state change (evergreen+).
1687  */
1688 void evergreen_pm_finish(struct radeon_device *rdev)
1689 {
1690 	struct drm_device *ddev = rdev->ddev;
1691 	struct drm_crtc *crtc;
1692 	struct radeon_crtc *radeon_crtc;
1693 	u32 tmp;
1694 
1695 	/* enable any active CRTCs */
1696 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1697 		radeon_crtc = to_radeon_crtc(crtc);
1698 		if (radeon_crtc->enabled) {
1699 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1700 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1701 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1702 		}
1703 	}
1704 }
1705 
1706 /**
1707  * evergreen_hpd_sense - hpd sense callback.
1708  *
1709  * @rdev: radeon_device pointer
1710  * @hpd: hpd (hotplug detect) pin
1711  *
1712  * Checks if a digital monitor is connected (evergreen+).
1713  * Returns true if connected, false if not connected.
1714  */
1715 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1716 {
1717 	bool connected = false;
1718 
1719 	switch (hpd) {
1720 	case RADEON_HPD_1:
1721 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1722 			connected = true;
1723 		break;
1724 	case RADEON_HPD_2:
1725 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1726 			connected = true;
1727 		break;
1728 	case RADEON_HPD_3:
1729 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1730 			connected = true;
1731 		break;
1732 	case RADEON_HPD_4:
1733 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1734 			connected = true;
1735 		break;
1736 	case RADEON_HPD_5:
1737 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1738 			connected = true;
1739 		break;
1740 	case RADEON_HPD_6:
1741 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1742 			connected = true;
1743 		break;
1744 	default:
1745 		break;
1746 	}
1747 
1748 	return connected;
1749 }
1750 
1751 /**
1752  * evergreen_hpd_set_polarity - hpd set polarity callback.
1753  *
1754  * @rdev: radeon_device pointer
1755  * @hpd: hpd (hotplug detect) pin
1756  *
1757  * Set the polarity of the hpd pin (evergreen+).
1758  */
1759 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1760 				enum radeon_hpd_id hpd)
1761 {
1762 	u32 tmp;
1763 	bool connected = evergreen_hpd_sense(rdev, hpd);
1764 
1765 	switch (hpd) {
1766 	case RADEON_HPD_1:
1767 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_2:
1775 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1781 		break;
1782 	case RADEON_HPD_3:
1783 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1789 		break;
1790 	case RADEON_HPD_4:
1791 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1792 		if (connected)
1793 			tmp &= ~DC_HPDx_INT_POLARITY;
1794 		else
1795 			tmp |= DC_HPDx_INT_POLARITY;
1796 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1797 		break;
1798 	case RADEON_HPD_5:
1799 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1800 		if (connected)
1801 			tmp &= ~DC_HPDx_INT_POLARITY;
1802 		else
1803 			tmp |= DC_HPDx_INT_POLARITY;
1804 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1805 			break;
1806 	case RADEON_HPD_6:
1807 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1808 		if (connected)
1809 			tmp &= ~DC_HPDx_INT_POLARITY;
1810 		else
1811 			tmp |= DC_HPDx_INT_POLARITY;
1812 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1813 		break;
1814 	default:
1815 		break;
1816 	}
1817 }
1818 
1819 /**
1820  * evergreen_hpd_init - hpd setup callback.
1821  *
1822  * @rdev: radeon_device pointer
1823  *
1824  * Setup the hpd pins used by the card (evergreen+).
1825  * Enable the pin, set the polarity, and enable the hpd interrupts.
1826  */
void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned enabled = 0; /* bitmask of hpd pins to enable, indexed by RADEON_HPD_x */
	/* per-pin setup: connection timer 0x9c4, RX int timer 0xfa, pin enabled */
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);

		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS avoid breaking the
			 * aux dp channel on imac and help (but not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * also avoid interrupt storms during dpms.
			 */
			continue;
		}
		/* program the timer/enable value into the pin this connector uses */
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			break;
		default:
			break;
		}
		/* set interrupt polarity based on current connection state */
		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			enabled |= 1 << radeon_connector->hpd.hpd;
	}
	/* enable hpd interrupts for all pins collected above */
	radeon_irq_kms_enable_hpd(rdev, enabled);
}
1875 
1876 /**
1877  * evergreen_hpd_fini - hpd tear down callback.
1878  *
1879  * @rdev: radeon_device pointer
1880  *
1881  * Tear down the hpd pins used by the card (evergreen+).
1882  * Disable the hpd interrupts.
1883  */
void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	unsigned disabled = 0; /* bitmask of hpd pins to disable, indexed by RADEON_HPD_x */

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		/* writing 0 clears DC_HPDx_EN (and the timer fields) for the pin */
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			break;
		default:
			break;
		}
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			disabled |= 1 << radeon_connector->hpd.hpd;
	}
	/* turn off the hpd interrupts for all pins collected above */
	radeon_irq_kms_disable_hpd(rdev, disabled);
}
1919 
1920 /* watermark setup */
1921 
/* Program the line buffer split for one crtc and return the number of
 * line-buffer bytes allocated to it (0 if the crtc is disabled).  The
 * other_mode argument is the mode of the paired crtc sharing this lb.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need DMIF buffers allocated; poll until the
	 * hardware acks the allocation or usec_timeout expires.
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the split setting back into a byte count; DCE5 parts
	 * have a larger line buffer than earlier evergreen chips.
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2012 
2013 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2014 {
2015 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2016 
2017 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2018 	case 0:
2019 	default:
2020 		return 1;
2021 	case 1:
2022 		return 2;
2023 	case 2:
2024 		return 4;
2025 	case 3:
2026 		return 8;
2027 	}
2028 }
2029 
/* Per-head inputs for the evergreen display watermark calculations.
 * Filled in by evergreen_program_watermarks() and consumed by the
 * evergreen_*_bandwidth()/watermark helpers below.
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2045 
2046 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2047 {
2048 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2049 	fixed20_12 dram_efficiency; /* 0.7 */
2050 	fixed20_12 yclk, dram_channels, bandwidth;
2051 	fixed20_12 a;
2052 
2053 	a.full = dfixed_const(1000);
2054 	yclk.full = dfixed_const(wm->yclk);
2055 	yclk.full = dfixed_div(yclk, a);
2056 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2057 	a.full = dfixed_const(10);
2058 	dram_efficiency.full = dfixed_const(7);
2059 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2060 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2061 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2062 
2063 	return dfixed_trunc(bandwidth);
2064 }
2065 
2066 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2067 {
2068 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2069 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2070 	fixed20_12 yclk, dram_channels, bandwidth;
2071 	fixed20_12 a;
2072 
2073 	a.full = dfixed_const(1000);
2074 	yclk.full = dfixed_const(wm->yclk);
2075 	yclk.full = dfixed_div(yclk, a);
2076 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2077 	a.full = dfixed_const(10);
2078 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2079 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2080 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2081 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2082 
2083 	return dfixed_trunc(bandwidth);
2084 }
2085 
2086 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088 	/* Calculate the display Data return Bandwidth */
2089 	fixed20_12 return_efficiency; /* 0.8 */
2090 	fixed20_12 sclk, bandwidth;
2091 	fixed20_12 a;
2092 
2093 	a.full = dfixed_const(1000);
2094 	sclk.full = dfixed_const(wm->sclk);
2095 	sclk.full = dfixed_div(sclk, a);
2096 	a.full = dfixed_const(10);
2097 	return_efficiency.full = dfixed_const(8);
2098 	return_efficiency.full = dfixed_div(return_efficiency, a);
2099 	a.full = dfixed_const(32);
2100 	bandwidth.full = dfixed_mul(a, sclk);
2101 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2102 
2103 	return dfixed_trunc(bandwidth);
2104 }
2105 
2106 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2107 {
2108 	/* Calculate the DMIF Request Bandwidth */
2109 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2110 	fixed20_12 disp_clk, bandwidth;
2111 	fixed20_12 a;
2112 
2113 	a.full = dfixed_const(1000);
2114 	disp_clk.full = dfixed_const(wm->disp_clk);
2115 	disp_clk.full = dfixed_div(disp_clk, a);
2116 	a.full = dfixed_const(10);
2117 	disp_clk_request_efficiency.full = dfixed_const(8);
2118 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2119 	a.full = dfixed_const(32);
2120 	bandwidth.full = dfixed_mul(a, disp_clk);
2121 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2122 
2123 	return dfixed_trunc(bandwidth);
2124 }
2125 
2126 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2127 {
2128 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2129 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2130 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2131 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2132 
2133 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2134 }
2135 
2136 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2137 {
2138 	/* Calculate the display mode Average Bandwidth
2139 	 * DisplayMode should contain the source and destination dimensions,
2140 	 * timing, etc.
2141 	 */
2142 	fixed20_12 bpp;
2143 	fixed20_12 line_time;
2144 	fixed20_12 src_width;
2145 	fixed20_12 bandwidth;
2146 	fixed20_12 a;
2147 
2148 	a.full = dfixed_const(1000);
2149 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2150 	line_time.full = dfixed_div(line_time, a);
2151 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2152 	src_width.full = dfixed_const(wm->src_width);
2153 	bandwidth.full = dfixed_mul(src_width, bpp);
2154 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2155 	bandwidth.full = dfixed_div(bandwidth, line_time);
2156 
2157 	return dfixed_trunc(bandwidth);
2158 }
2159 
/* Compute the latency watermark (in ns) for one head: the worst-case
 * time from a display request until its data returns, extended by any
 * shortfall in line-buffer fill rate versus the active scan time.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	/* NOTE(review): the initializers below divide by available_bandwidth
	 * and disp_clk before the num_heads guard runs — assumes both are
	 * nonzero for any enabled head; confirm against callers.
	 */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* determine how many source lines can feed one destination line:
	 * 4 for downscaling/interlace/many-tap cases, otherwise 2.
	 */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's fair share of available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* display-clock-limited fill rate: disp_clk(MHz) * bytes_per_pixel */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill bandwidth is limited by the slower of the two */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time (ns) to fill the source lines for one destination line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if filling a line outlasts the active time, charge the overrun */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2212 
2213 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2214 {
2215 	if (evergreen_average_bandwidth(wm) <=
2216 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2217 		return true;
2218 	else
2219 		return false;
2220 };
2221 
2222 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2223 {
2224 	if (evergreen_average_bandwidth(wm) <=
2225 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2226 		return true;
2227 	else
2228 		return false;
2229 };
2230 
2231 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2232 {
2233 	u32 lb_partitions = wm->lb_size / wm->src_width;
2234 	u32 line_time = wm->active_time + wm->blank_time;
2235 	u32 latency_tolerant_lines;
2236 	u32 latency_hiding;
2237 	fixed20_12 a;
2238 
2239 	a.full = dfixed_const(1);
2240 	if (wm->vsc.full > a.full)
2241 		latency_tolerant_lines = 1;
2242 	else {
2243 		if (lb_partitions <= (wm->vtaps + 1))
2244 			latency_tolerant_lines = 1;
2245 		else
2246 			latency_tolerant_lines = 2;
2247 	}
2248 
2249 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2250 
2251 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2252 		return true;
2253 	else
2254 		return false;
2255 }
2256 
/* Compute and program the latency watermarks (wm A for high clocks,
 * wm B for low clocks) and the priority marks for one crtc, and save
 * the results in the radeon_crtc for later use by DPM.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line time capped to the 16-bit field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark = watermark(ns) * pixel clock * hsc / 1000 / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2414 
2415 /**
2416  * evergreen_bandwidth_update - update display watermarks callback.
2417  *
2418  * @rdev: radeon_device pointer
2419  *
2420  * Update the display watermarks based on the requested mode(s)
2421  * (evergreen+).
2422  */
2423 void evergreen_bandwidth_update(struct radeon_device *rdev)
2424 {
2425 	struct drm_display_mode *mode0 = NULL;
2426 	struct drm_display_mode *mode1 = NULL;
2427 	u32 num_heads = 0, lb_size;
2428 	int i;
2429 
2430 	if (!rdev->mode_info.mode_config_initialized)
2431 		return;
2432 
2433 	radeon_update_display_priority(rdev);
2434 
2435 	for (i = 0; i < rdev->num_crtc; i++) {
2436 		if (rdev->mode_info.crtcs[i]->base.enabled)
2437 			num_heads++;
2438 	}
2439 	for (i = 0; i < rdev->num_crtc; i += 2) {
2440 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2441 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2442 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2443 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2444 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2445 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2446 	}
2447 }
2448 
2449 /**
2450  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2451  *
2452  * @rdev: radeon_device pointer
2453  *
2454  * Wait for the MC (memory controller) to be idle.
2455  * (evergreen+).
2456  * Returns 0 if the MC is idle, -1 if not.
2457  */
2458 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2459 {
2460 	unsigned i;
2461 	u32 tmp;
2462 
2463 	for (i = 0; i < rdev->usec_timeout; i++) {
2464 		/* read MC_STATUS */
2465 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2466 		if (!tmp)
2467 			return 0;
2468 		udelay(1);
2469 	}
2470 	return -1;
2471 }
2472 
2473 /*
2474  * GART
2475  */
2476 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2477 {
2478 	unsigned i;
2479 	u32 tmp;
2480 
2481 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2482 
2483 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2484 	for (i = 0; i < rdev->usec_timeout; i++) {
2485 		/* read MC_STATUS */
2486 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2487 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2488 		if (tmp == 2) {
2489 			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2490 			return;
2491 		}
2492 		if (tmp) {
2493 			return;
2494 		}
2495 		udelay(1);
2496 	}
2497 }
2498 
/* Enable the PCIE GART: pin the page table in VRAM, configure the VM
 * L2 cache and L1 TLBs, point VM context 0 at the GTT range and page
 * table, then flush the TLB.  Returns 0 on success or a negative errno.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGP parts use the FUS_ variants of the MD TLB registers */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* context 0 covers the GTT range, backed by the pinned page table;
	 * addresses are programmed in 4KB page units (>> 12).
	 */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2556 
/* Disable the PCIE GART: turn off both VM contexts, leave the L2/L1
 * caches disabled (ENABLE bits cleared), and unpin the page table.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (no ENABLE_L1_TLB: TLBs stay off) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2581 
/* Tear down the GART: disable the hardware, free the page table in
 * VRAM, then release the common GART bookkeeping.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2588 
2589 
/* Configure the VM caches/TLBs for AGP operation: same L2/L1 setup as
 * the GART path, but both VM contexts are left disabled (no page-table
 * translation; the aperture passes accesses through).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no VM contexts: AGP does not use the page table */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2615 
/* Quiesce the display hardware before reprogramming the MC: save VGA
 * state, blank and disable every active crtc, blackout the MC, and lock
 * the double-buffered surface registers.  State needed by
 * evergreen_mc_resume() is recorded in *save.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): the saved flag is cleared again here as part
			 * of the EFI hack above, so resume treats this crtc as off.
			 */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2702 
/* Undo evergreen_mc_stop(): repoint all crtc surfaces at the (possibly
 * relocated) start of VRAM, unlock the double-buffered registers, end
 * the MC blackout, unblank the crtcs, and restore the saved VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force MASTER_UPDATE_MODE to "update at vsync" (3) */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the surface update to land */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2791 
/**
 * evergreen_mc_program - program the memory controller address map
 * @rdev: radeon_device pointer
 *
 * Stops the displays (evergreen_mc_stop), programs the system aperture
 * and the VRAM/AGP locations in the memory controller, then restores
 * the displays (evergreen_mc_resume) and disables the VGA renderer so
 * it cannot scribble over driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	/* NOTE(review): 0x2c14..0x2c24 with a 0x18 stride look like the
	 * per-surface HDP registers carried over from the r6xx code; the
	 * raw offsets predate this file - confirm against the register
	 * headers before changing */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* displays must be idle before the MC map changes under them */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both the VRAM and the
		 * AGP/GTT ranges, so its bounds depend on their order */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* stray system-aperture accesses are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* pack vram start/end (in MB, 4 bits each) into the fused
		 * FB offset register while preserving its low 20 bits */
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start and end in 16MB units, end in the high half */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* BOT == TOP presumably disables the AGP aperture - these
		 * are the values used throughout the radeon driver */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2868 
2869 /*
2870  * CP.
2871  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the gfx ring
 * @rdev: radeon_device pointer
 * @ib: IB to schedule
 *
 * Emits the PM4 packets that make the CP execute @ib: a mode-control
 * packet, an optional next-rptr write-back, and the INDIRECT_BUFFER
 * packet carrying the IB's GPU address and dword length.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG packet plus 4 for the
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE packet plus 4 for the
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* high address byte; bit 18 presumably selects 32-bit data
		 * write mode - TODO confirm against the PM4 packet spec */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |	/* dword swap for big-endian hosts */
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2905 
2906 
/**
 * evergreen_cp_load_microcode - load the CP PFP and ME microcode
 * @rdev: radeon_device pointer
 *
 * Halts the CP, then streams the prefetch parser (PFP) and micro engine
 * (ME) firmware images one dword at a time through their ucode data
 * ports.  The firmware blobs are stored big-endian (__be32).
 *
 * Returns 0 on success, -EINVAL if the firmware has not been fetched.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	/* both images must have been requested by the firmware loader */
	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	/* stop the CP before touching its microcode */
	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP ucode: reset the write address, stream the image in, then
	 * reset the address again */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME ucode: same scheme through the ME RAM port */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* leave all ucode address registers back at zero */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2938 
/**
 * evergreen_cp_start - initialize the CP micro engine and clear state
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, takes the micro engine out of halt,
 * then emits the golden clear-state sequence (evergreen_default_state)
 * followed by a few fixed register writes the CP expects after init.
 *
 * Returns 0 on success, negative error code if a ring lock fails.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: exactly the ME_INITIALIZE packet emitted below */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* NOTE(review): writing 0xff appears to release the halt bits set
	 * by r700_cp_stop(), starting the micro engine - confirm against
	 * the CP_ME_CNTL bit definitions */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* 19 extra dwords: every packet emitted below beyond the
	 * evergreen_default_state stream (2+2+2+4+5+4) */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	/* SET_CONTEXT_REG, 2 registers starting at offset 0x316 */
	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3004 
3005 static int evergreen_cp_resume(struct radeon_device *rdev)
3006 {
3007 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3008 	u32 tmp;
3009 	u32 rb_bufsz;
3010 	int r;
3011 
3012 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3013 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3014 				 SOFT_RESET_PA |
3015 				 SOFT_RESET_SH |
3016 				 SOFT_RESET_VGT |
3017 				 SOFT_RESET_SPI |
3018 				 SOFT_RESET_SX));
3019 	RREG32(GRBM_SOFT_RESET);
3020 	mdelay(15);
3021 	WREG32(GRBM_SOFT_RESET, 0);
3022 	RREG32(GRBM_SOFT_RESET);
3023 
3024 	/* Set ring buffer size */
3025 	rb_bufsz = order_base_2(ring->ring_size / 8);
3026 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3027 #ifdef __BIG_ENDIAN
3028 	tmp |= BUF_SWAP_32BIT;
3029 #endif
3030 	WREG32(CP_RB_CNTL, tmp);
3031 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
3032 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3033 
3034 	/* Set the write pointer delay */
3035 	WREG32(CP_RB_WPTR_DELAY, 0);
3036 
3037 	/* Initialize the ring buffer's read and write pointers */
3038 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3039 	WREG32(CP_RB_RPTR_WR, 0);
3040 	ring->wptr = 0;
3041 	WREG32(CP_RB_WPTR, ring->wptr);
3042 
3043 	/* set the wb address whether it's enabled or not */
3044 	WREG32(CP_RB_RPTR_ADDR,
3045 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3046 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3047 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3048 
3049 	if (rdev->wb.enabled)
3050 		WREG32(SCRATCH_UMSK, 0xff);
3051 	else {
3052 		tmp |= RB_NO_UPDATE;
3053 		WREG32(SCRATCH_UMSK, 0);
3054 	}
3055 
3056 	mdelay(1);
3057 	WREG32(CP_RB_CNTL, tmp);
3058 
3059 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3060 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3061 
3062 	evergreen_cp_start(rdev);
3063 	ring->ready = true;
3064 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3065 	if (r) {
3066 		ring->ready = false;
3067 		return r;
3068 	}
3069 	return 0;
3070 }
3071 
3072 /*
3073  * Core functions
3074  */
3075 static void evergreen_gpu_init(struct radeon_device *rdev)
3076 {
3077 	u32 gb_addr_config;
3078 	u32 mc_shared_chmap, mc_arb_ramcfg;
3079 	u32 sx_debug_1;
3080 	u32 smx_dc_ctl0;
3081 	u32 sq_config;
3082 	u32 sq_lds_resource_mgmt;
3083 	u32 sq_gpr_resource_mgmt_1;
3084 	u32 sq_gpr_resource_mgmt_2;
3085 	u32 sq_gpr_resource_mgmt_3;
3086 	u32 sq_thread_resource_mgmt;
3087 	u32 sq_thread_resource_mgmt_2;
3088 	u32 sq_stack_resource_mgmt_1;
3089 	u32 sq_stack_resource_mgmt_2;
3090 	u32 sq_stack_resource_mgmt_3;
3091 	u32 vgt_cache_invalidation;
3092 	u32 hdp_host_path_cntl, tmp;
3093 	u32 disabled_rb_mask;
3094 	int i, j, ps_thread_count;
3095 
3096 	switch (rdev->family) {
3097 	case CHIP_CYPRESS:
3098 	case CHIP_HEMLOCK:
3099 		rdev->config.evergreen.num_ses = 2;
3100 		rdev->config.evergreen.max_pipes = 4;
3101 		rdev->config.evergreen.max_tile_pipes = 8;
3102 		rdev->config.evergreen.max_simds = 10;
3103 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3104 		rdev->config.evergreen.max_gprs = 256;
3105 		rdev->config.evergreen.max_threads = 248;
3106 		rdev->config.evergreen.max_gs_threads = 32;
3107 		rdev->config.evergreen.max_stack_entries = 512;
3108 		rdev->config.evergreen.sx_num_of_sets = 4;
3109 		rdev->config.evergreen.sx_max_export_size = 256;
3110 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3111 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3112 		rdev->config.evergreen.max_hw_contexts = 8;
3113 		rdev->config.evergreen.sq_num_cf_insts = 2;
3114 
3115 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3116 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3117 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3118 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3119 		break;
3120 	case CHIP_JUNIPER:
3121 		rdev->config.evergreen.num_ses = 1;
3122 		rdev->config.evergreen.max_pipes = 4;
3123 		rdev->config.evergreen.max_tile_pipes = 4;
3124 		rdev->config.evergreen.max_simds = 10;
3125 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3126 		rdev->config.evergreen.max_gprs = 256;
3127 		rdev->config.evergreen.max_threads = 248;
3128 		rdev->config.evergreen.max_gs_threads = 32;
3129 		rdev->config.evergreen.max_stack_entries = 512;
3130 		rdev->config.evergreen.sx_num_of_sets = 4;
3131 		rdev->config.evergreen.sx_max_export_size = 256;
3132 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3133 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3134 		rdev->config.evergreen.max_hw_contexts = 8;
3135 		rdev->config.evergreen.sq_num_cf_insts = 2;
3136 
3137 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3138 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3139 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3140 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3141 		break;
3142 	case CHIP_REDWOOD:
3143 		rdev->config.evergreen.num_ses = 1;
3144 		rdev->config.evergreen.max_pipes = 4;
3145 		rdev->config.evergreen.max_tile_pipes = 4;
3146 		rdev->config.evergreen.max_simds = 5;
3147 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3148 		rdev->config.evergreen.max_gprs = 256;
3149 		rdev->config.evergreen.max_threads = 248;
3150 		rdev->config.evergreen.max_gs_threads = 32;
3151 		rdev->config.evergreen.max_stack_entries = 256;
3152 		rdev->config.evergreen.sx_num_of_sets = 4;
3153 		rdev->config.evergreen.sx_max_export_size = 256;
3154 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3155 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3156 		rdev->config.evergreen.max_hw_contexts = 8;
3157 		rdev->config.evergreen.sq_num_cf_insts = 2;
3158 
3159 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3160 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3161 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3162 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3163 		break;
3164 	case CHIP_CEDAR:
3165 	default:
3166 		rdev->config.evergreen.num_ses = 1;
3167 		rdev->config.evergreen.max_pipes = 2;
3168 		rdev->config.evergreen.max_tile_pipes = 2;
3169 		rdev->config.evergreen.max_simds = 2;
3170 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3171 		rdev->config.evergreen.max_gprs = 256;
3172 		rdev->config.evergreen.max_threads = 192;
3173 		rdev->config.evergreen.max_gs_threads = 16;
3174 		rdev->config.evergreen.max_stack_entries = 256;
3175 		rdev->config.evergreen.sx_num_of_sets = 4;
3176 		rdev->config.evergreen.sx_max_export_size = 128;
3177 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3178 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3179 		rdev->config.evergreen.max_hw_contexts = 4;
3180 		rdev->config.evergreen.sq_num_cf_insts = 1;
3181 
3182 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3183 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3184 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3185 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3186 		break;
3187 	case CHIP_PALM:
3188 		rdev->config.evergreen.num_ses = 1;
3189 		rdev->config.evergreen.max_pipes = 2;
3190 		rdev->config.evergreen.max_tile_pipes = 2;
3191 		rdev->config.evergreen.max_simds = 2;
3192 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3193 		rdev->config.evergreen.max_gprs = 256;
3194 		rdev->config.evergreen.max_threads = 192;
3195 		rdev->config.evergreen.max_gs_threads = 16;
3196 		rdev->config.evergreen.max_stack_entries = 256;
3197 		rdev->config.evergreen.sx_num_of_sets = 4;
3198 		rdev->config.evergreen.sx_max_export_size = 128;
3199 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3200 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3201 		rdev->config.evergreen.max_hw_contexts = 4;
3202 		rdev->config.evergreen.sq_num_cf_insts = 1;
3203 
3204 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3205 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3206 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3207 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3208 		break;
3209 	case CHIP_SUMO:
3210 		rdev->config.evergreen.num_ses = 1;
3211 		rdev->config.evergreen.max_pipes = 4;
3212 		rdev->config.evergreen.max_tile_pipes = 4;
3213 		if (rdev->pdev->device == 0x9648)
3214 			rdev->config.evergreen.max_simds = 3;
3215 		else if ((rdev->pdev->device == 0x9647) ||
3216 			 (rdev->pdev->device == 0x964a))
3217 			rdev->config.evergreen.max_simds = 4;
3218 		else
3219 			rdev->config.evergreen.max_simds = 5;
3220 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3221 		rdev->config.evergreen.max_gprs = 256;
3222 		rdev->config.evergreen.max_threads = 248;
3223 		rdev->config.evergreen.max_gs_threads = 32;
3224 		rdev->config.evergreen.max_stack_entries = 256;
3225 		rdev->config.evergreen.sx_num_of_sets = 4;
3226 		rdev->config.evergreen.sx_max_export_size = 256;
3227 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3228 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3229 		rdev->config.evergreen.max_hw_contexts = 8;
3230 		rdev->config.evergreen.sq_num_cf_insts = 2;
3231 
3232 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3233 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3234 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3235 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3236 		break;
3237 	case CHIP_SUMO2:
3238 		rdev->config.evergreen.num_ses = 1;
3239 		rdev->config.evergreen.max_pipes = 4;
3240 		rdev->config.evergreen.max_tile_pipes = 4;
3241 		rdev->config.evergreen.max_simds = 2;
3242 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3243 		rdev->config.evergreen.max_gprs = 256;
3244 		rdev->config.evergreen.max_threads = 248;
3245 		rdev->config.evergreen.max_gs_threads = 32;
3246 		rdev->config.evergreen.max_stack_entries = 512;
3247 		rdev->config.evergreen.sx_num_of_sets = 4;
3248 		rdev->config.evergreen.sx_max_export_size = 256;
3249 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3250 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3251 		rdev->config.evergreen.max_hw_contexts = 4;
3252 		rdev->config.evergreen.sq_num_cf_insts = 2;
3253 
3254 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3255 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3256 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3257 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3258 		break;
3259 	case CHIP_BARTS:
3260 		rdev->config.evergreen.num_ses = 2;
3261 		rdev->config.evergreen.max_pipes = 4;
3262 		rdev->config.evergreen.max_tile_pipes = 8;
3263 		rdev->config.evergreen.max_simds = 7;
3264 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3265 		rdev->config.evergreen.max_gprs = 256;
3266 		rdev->config.evergreen.max_threads = 248;
3267 		rdev->config.evergreen.max_gs_threads = 32;
3268 		rdev->config.evergreen.max_stack_entries = 512;
3269 		rdev->config.evergreen.sx_num_of_sets = 4;
3270 		rdev->config.evergreen.sx_max_export_size = 256;
3271 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3272 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3273 		rdev->config.evergreen.max_hw_contexts = 8;
3274 		rdev->config.evergreen.sq_num_cf_insts = 2;
3275 
3276 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3277 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3278 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3279 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3280 		break;
3281 	case CHIP_TURKS:
3282 		rdev->config.evergreen.num_ses = 1;
3283 		rdev->config.evergreen.max_pipes = 4;
3284 		rdev->config.evergreen.max_tile_pipes = 4;
3285 		rdev->config.evergreen.max_simds = 6;
3286 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3287 		rdev->config.evergreen.max_gprs = 256;
3288 		rdev->config.evergreen.max_threads = 248;
3289 		rdev->config.evergreen.max_gs_threads = 32;
3290 		rdev->config.evergreen.max_stack_entries = 256;
3291 		rdev->config.evergreen.sx_num_of_sets = 4;
3292 		rdev->config.evergreen.sx_max_export_size = 256;
3293 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3294 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3295 		rdev->config.evergreen.max_hw_contexts = 8;
3296 		rdev->config.evergreen.sq_num_cf_insts = 2;
3297 
3298 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3299 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3300 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3301 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3302 		break;
3303 	case CHIP_CAICOS:
3304 		rdev->config.evergreen.num_ses = 1;
3305 		rdev->config.evergreen.max_pipes = 2;
3306 		rdev->config.evergreen.max_tile_pipes = 2;
3307 		rdev->config.evergreen.max_simds = 2;
3308 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3309 		rdev->config.evergreen.max_gprs = 256;
3310 		rdev->config.evergreen.max_threads = 192;
3311 		rdev->config.evergreen.max_gs_threads = 16;
3312 		rdev->config.evergreen.max_stack_entries = 256;
3313 		rdev->config.evergreen.sx_num_of_sets = 4;
3314 		rdev->config.evergreen.sx_max_export_size = 128;
3315 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3316 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3317 		rdev->config.evergreen.max_hw_contexts = 4;
3318 		rdev->config.evergreen.sq_num_cf_insts = 1;
3319 
3320 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3321 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3322 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3323 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3324 		break;
3325 	}
3326 
3327 	/* Initialize HDP */
3328 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3329 		WREG32((0x2c14 + j), 0x00000000);
3330 		WREG32((0x2c18 + j), 0x00000000);
3331 		WREG32((0x2c1c + j), 0x00000000);
3332 		WREG32((0x2c20 + j), 0x00000000);
3333 		WREG32((0x2c24 + j), 0x00000000);
3334 	}
3335 
3336 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3337 	WREG32(SRBM_INT_CNTL, 0x1);
3338 	WREG32(SRBM_INT_ACK, 0x1);
3339 
3340 	evergreen_fix_pci_max_read_req_size(rdev);
3341 
3342 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3343 	if ((rdev->family == CHIP_PALM) ||
3344 	    (rdev->family == CHIP_SUMO) ||
3345 	    (rdev->family == CHIP_SUMO2))
3346 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3347 	else
3348 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3349 
3350 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3351 	 * not have bank info, so create a custom tiling dword.
3352 	 * bits 3:0   num_pipes
3353 	 * bits 7:4   num_banks
3354 	 * bits 11:8  group_size
3355 	 * bits 15:12 row_size
3356 	 */
3357 	rdev->config.evergreen.tile_config = 0;
3358 	switch (rdev->config.evergreen.max_tile_pipes) {
3359 	case 1:
3360 	default:
3361 		rdev->config.evergreen.tile_config |= (0 << 0);
3362 		break;
3363 	case 2:
3364 		rdev->config.evergreen.tile_config |= (1 << 0);
3365 		break;
3366 	case 4:
3367 		rdev->config.evergreen.tile_config |= (2 << 0);
3368 		break;
3369 	case 8:
3370 		rdev->config.evergreen.tile_config |= (3 << 0);
3371 		break;
3372 	}
3373 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3374 	if (rdev->flags & RADEON_IS_IGP)
3375 		rdev->config.evergreen.tile_config |= 1 << 4;
3376 	else {
3377 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3378 		case 0: /* four banks */
3379 			rdev->config.evergreen.tile_config |= 0 << 4;
3380 			break;
3381 		case 1: /* eight banks */
3382 			rdev->config.evergreen.tile_config |= 1 << 4;
3383 			break;
3384 		case 2: /* sixteen banks */
3385 		default:
3386 			rdev->config.evergreen.tile_config |= 2 << 4;
3387 			break;
3388 		}
3389 	}
3390 	rdev->config.evergreen.tile_config |= 0 << 8;
3391 	rdev->config.evergreen.tile_config |=
3392 		((gb_addr_config & 0x30000000) >> 28) << 12;
3393 
3394 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3395 		u32 efuse_straps_4;
3396 		u32 efuse_straps_3;
3397 
3398 		efuse_straps_4 = RREG32_RCU(0x204);
3399 		efuse_straps_3 = RREG32_RCU(0x203);
3400 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3401 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3402 	} else {
3403 		tmp = 0;
3404 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3405 			u32 rb_disable_bitmap;
3406 
3407 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3408 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3409 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3410 			tmp <<= 4;
3411 			tmp |= rb_disable_bitmap;
3412 		}
3413 	}
3414 	/* enabled rb are just the one not disabled :) */
3415 	disabled_rb_mask = tmp;
3416 	tmp = 0;
3417 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3418 		tmp |= (1 << i);
3419 	/* if all the backends are disabled, fix it up here */
3420 	if ((disabled_rb_mask & tmp) == tmp) {
3421 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3422 			disabled_rb_mask &= ~(1 << i);
3423 	}
3424 
3425 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3426 		u32 simd_disable_bitmap;
3427 
3428 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3429 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3430 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3431 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3432 		tmp <<= 16;
3433 		tmp |= simd_disable_bitmap;
3434 	}
3435 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3436 
3437 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3438 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3439 
3440 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3441 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3442 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3443 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3444 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3445 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3446 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3447 
3448 	if ((rdev->config.evergreen.max_backends == 1) &&
3449 	    (rdev->flags & RADEON_IS_IGP)) {
3450 		if ((disabled_rb_mask & 3) == 1) {
3451 			/* RB0 disabled, RB1 enabled */
3452 			tmp = 0x11111111;
3453 		} else {
3454 			/* RB1 disabled, RB0 enabled */
3455 			tmp = 0x00000000;
3456 		}
3457 	} else {
3458 		tmp = gb_addr_config & NUM_PIPES_MASK;
3459 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3460 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3461 	}
3462 	WREG32(GB_BACKEND_MAP, tmp);
3463 
3464 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3465 	WREG32(CGTS_TCC_DISABLE, 0);
3466 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3467 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3468 
3469 	/* set HW defaults for 3D engine */
3470 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3471 				     ROQ_IB2_START(0x2b)));
3472 
3473 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3474 
3475 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3476 			     SYNC_GRADIENT |
3477 			     SYNC_WALKER |
3478 			     SYNC_ALIGNER));
3479 
3480 	sx_debug_1 = RREG32(SX_DEBUG_1);
3481 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3482 	WREG32(SX_DEBUG_1, sx_debug_1);
3483 
3484 
3485 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3486 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3487 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3488 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3489 
3490 	if (rdev->family <= CHIP_SUMO2)
3491 		WREG32(SMX_SAR_CTL0, 0x00010000);
3492 
3493 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3494 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3495 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3496 
3497 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3498 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3499 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3500 
3501 	WREG32(VGT_NUM_INSTANCES, 1);
3502 	WREG32(SPI_CONFIG_CNTL, 0);
3503 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3504 	WREG32(CP_PERFMON_CNTL, 0);
3505 
3506 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3507 				  FETCH_FIFO_HIWATER(0x4) |
3508 				  DONE_FIFO_HIWATER(0xe0) |
3509 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3510 
3511 	sq_config = RREG32(SQ_CONFIG);
3512 	sq_config &= ~(PS_PRIO(3) |
3513 		       VS_PRIO(3) |
3514 		       GS_PRIO(3) |
3515 		       ES_PRIO(3));
3516 	sq_config |= (VC_ENABLE |
3517 		      EXPORT_SRC_C |
3518 		      PS_PRIO(0) |
3519 		      VS_PRIO(1) |
3520 		      GS_PRIO(2) |
3521 		      ES_PRIO(3));
3522 
3523 	switch (rdev->family) {
3524 	case CHIP_CEDAR:
3525 	case CHIP_PALM:
3526 	case CHIP_SUMO:
3527 	case CHIP_SUMO2:
3528 	case CHIP_CAICOS:
3529 		/* no vertex cache */
3530 		sq_config &= ~VC_ENABLE;
3531 		break;
3532 	default:
3533 		break;
3534 	}
3535 
3536 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3537 
3538 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3539 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3540 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3541 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3542 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3543 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3544 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3545 
3546 	switch (rdev->family) {
3547 	case CHIP_CEDAR:
3548 	case CHIP_PALM:
3549 	case CHIP_SUMO:
3550 	case CHIP_SUMO2:
3551 		ps_thread_count = 96;
3552 		break;
3553 	default:
3554 		ps_thread_count = 128;
3555 		break;
3556 	}
3557 
3558 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3559 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3560 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3561 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3562 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3563 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3564 
3565 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3566 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3567 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3568 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3569 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3570 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3571 
3572 	WREG32(SQ_CONFIG, sq_config);
3573 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3574 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3575 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3576 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3577 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3578 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3579 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3580 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3581 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3582 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3583 
3584 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3585 					  FORCE_EOV_MAX_REZ_CNT(255)));
3586 
3587 	switch (rdev->family) {
3588 	case CHIP_CEDAR:
3589 	case CHIP_PALM:
3590 	case CHIP_SUMO:
3591 	case CHIP_SUMO2:
3592 	case CHIP_CAICOS:
3593 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3594 		break;
3595 	default:
3596 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3597 		break;
3598 	}
3599 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3600 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3601 
3602 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3603 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3604 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3605 
3606 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3607 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3608 
3609 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3610 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3611 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3612 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3613 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3614 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3615 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3616 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3617 
3618 	/* clear render buffer base addresses */
3619 	WREG32(CB_COLOR0_BASE, 0);
3620 	WREG32(CB_COLOR1_BASE, 0);
3621 	WREG32(CB_COLOR2_BASE, 0);
3622 	WREG32(CB_COLOR3_BASE, 0);
3623 	WREG32(CB_COLOR4_BASE, 0);
3624 	WREG32(CB_COLOR5_BASE, 0);
3625 	WREG32(CB_COLOR6_BASE, 0);
3626 	WREG32(CB_COLOR7_BASE, 0);
3627 	WREG32(CB_COLOR8_BASE, 0);
3628 	WREG32(CB_COLOR9_BASE, 0);
3629 	WREG32(CB_COLOR10_BASE, 0);
3630 	WREG32(CB_COLOR11_BASE, 0);
3631 
3632 	/* set the shader const cache sizes to 0 */
3633 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3634 		WREG32(i, 0);
3635 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3636 		WREG32(i, 0);
3637 
3638 	tmp = RREG32(HDP_MISC_CNTL);
3639 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3640 	WREG32(HDP_MISC_CNTL, tmp);
3641 
3642 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3643 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3644 
3645 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3646 
3647 	udelay(50);
3648 
3649 }
3650 
3651 int evergreen_mc_init(struct radeon_device *rdev)
3652 {
3653 	u32 tmp;
3654 	int chansize, numchan;
3655 
3656 	/* Get VRAM informations */
3657 	rdev->mc.vram_is_ddr = true;
3658 	if ((rdev->family == CHIP_PALM) ||
3659 	    (rdev->family == CHIP_SUMO) ||
3660 	    (rdev->family == CHIP_SUMO2))
3661 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3662 	else
3663 		tmp = RREG32(MC_ARB_RAMCFG);
3664 	if (tmp & CHANSIZE_OVERRIDE) {
3665 		chansize = 16;
3666 	} else if (tmp & CHANSIZE_MASK) {
3667 		chansize = 64;
3668 	} else {
3669 		chansize = 32;
3670 	}
3671 	tmp = RREG32(MC_SHARED_CHMAP);
3672 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3673 	case 0:
3674 	default:
3675 		numchan = 1;
3676 		break;
3677 	case 1:
3678 		numchan = 2;
3679 		break;
3680 	case 2:
3681 		numchan = 4;
3682 		break;
3683 	case 3:
3684 		numchan = 8;
3685 		break;
3686 	}
3687 	rdev->mc.vram_width = numchan * chansize;
3688 	/* Could aper size report 0 ? */
3689 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3690 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3691 	/* Setup GPU memory space */
3692 	if ((rdev->family == CHIP_PALM) ||
3693 	    (rdev->family == CHIP_SUMO) ||
3694 	    (rdev->family == CHIP_SUMO2)) {
3695 		/* size in bytes on fusion */
3696 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3697 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3698 	} else {
3699 		/* size in MB on evergreen/cayman/tn */
3700 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3701 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3702 	}
3703 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3704 	r700_vram_gtt_location(rdev, &rdev->mc);
3705 	radeon_update_bandwidth_info(rdev);
3706 
3707 	return 0;
3708 }
3709 
3710 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3711 {
3712 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3713 		RREG32(GRBM_STATUS));
3714 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3715 		RREG32(GRBM_STATUS_SE0));
3716 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3717 		RREG32(GRBM_STATUS_SE1));
3718 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3719 		RREG32(SRBM_STATUS));
3720 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3721 		RREG32(SRBM_STATUS2));
3722 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3723 		RREG32(CP_STALLED_STAT1));
3724 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3725 		RREG32(CP_STALLED_STAT2));
3726 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3727 		RREG32(CP_BUSY_STAT));
3728 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3729 		RREG32(CP_STAT));
3730 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3731 		RREG32(DMA_STATUS_REG));
3732 	if (rdev->family >= CHIP_CAYMAN) {
3733 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3734 			 RREG32(DMA_STATUS_REG + 0x800));
3735 	}
3736 }
3737 
3738 bool evergreen_is_display_hung(struct radeon_device *rdev)
3739 {
3740 	u32 crtc_hung = 0;
3741 	u32 crtc_status[6];
3742 	u32 i, j, tmp;
3743 
3744 	for (i = 0; i < rdev->num_crtc; i++) {
3745 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3746 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3747 			crtc_hung |= (1 << i);
3748 		}
3749 	}
3750 
3751 	for (j = 0; j < 10; j++) {
3752 		for (i = 0; i < rdev->num_crtc; i++) {
3753 			if (crtc_hung & (1 << i)) {
3754 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3755 				if (tmp != crtc_status[i])
3756 					crtc_hung &= ~(1 << i);
3757 			}
3758 		}
3759 		if (crtc_hung == 0)
3760 			return false;
3761 		udelay(100);
3762 	}
3763 
3764 	return true;
3765 }
3766 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 *
 * @rdev: radeon_device pointer
 *
 * Reads the various busy/pending status registers and builds a mask of
 * RADEON_RESET_* flags for the blocks that appear to be stuck.  Returns
 * 0 when everything looks idle (no reset required).  A set MC bit is
 * deliberately cleared before returning (see below).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	/* any busy graphics pipeline block implies a GFX reset */
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3835 
/**
 * evergreen_gpu_soft_reset - soft reset the blocks named in @reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags, typically produced by
 * evergreen_gpu_check_soft_reset()
 *
 * Halts the CP (and DMA if requested), stops memory controller client
 * access, then pulses the matching bits in GRBM_SOFT_RESET and
 * SRBM_SOFT_RESET before restarting the MC.  A no-op if @reset_mask
 * is 0.  The status registers are dumped before and after for
 * debugging.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC client access before pulsing the reset lines */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the generic reset flags into GRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* ... and into SRBM soft reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* NOTE(review): MC reset is skipped on IGPs — presumably because
	 * fusion parts share the system memory controller; confirm */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* assert the GRBM reset bits, hold ~50us, then deassert; the
	 * extra RREG32s presumably post the writes — TODO confirm */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* same pulse sequence for the SRBM reset bits */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3949 
/**
 * evergreen_gpu_pci_config_reset - reset the asic via the PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * The "big hammer" fallback when a soft reset is not enough: quiesce the
 * CP, DMA and RLC, switch the clocks to bypass, disable bus mastering,
 * stop the memory controller, then trigger a full reset through PCI
 * config space and poll until the chip responds again.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	/* while in reset, register reads return all 1s; poll until
	 * CONFIG_MEMSIZE returns something sane again */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3991 
3992 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
3993 {
3994 	u32 reset_mask;
3995 
3996 	if (hard) {
3997 		evergreen_gpu_pci_config_reset(rdev);
3998 		return 0;
3999 	}
4000 
4001 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4002 
4003 	if (reset_mask)
4004 		r600_set_bios_scratch_engine_hung(rdev, true);
4005 
4006 	/* try soft reset */
4007 	evergreen_gpu_soft_reset(rdev, reset_mask);
4008 
4009 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4010 
4011 	/* try pci config reset */
4012 	if (reset_mask && radeon_hard_reset)
4013 		evergreen_gpu_pci_config_reset(rdev);
4014 
4015 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4016 
4017 	if (!reset_mask)
4018 		r600_set_bios_scratch_engine_hung(rdev, false);
4019 
4020 	return 0;
4021 }
4022 
4023 /**
4024  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4025  *
4026  * @rdev: radeon_device pointer
4027  * @ring: radeon_ring structure holding ring information
4028  *
4029  * Check if the GFX engine is locked up.
4030  * Returns true if the engine appears to be locked up, false if not.
4031  */
4032 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4033 {
4034 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4035 
4036 	if (!(reset_mask & (RADEON_RESET_GFX |
4037 			    RADEON_RESET_COMPUTE |
4038 			    RADEON_RESET_CP))) {
4039 		radeon_ring_lockup_update(rdev, ring);
4040 		return false;
4041 	}
4042 	return radeon_ring_test_lockup(rdev, ring);
4043 }
4044 
4045 /*
4046  * RLC
4047  */
4048 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4049 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4050 
4051 void sumo_rlc_fini(struct radeon_device *rdev)
4052 {
4053 	int r;
4054 
4055 	/* save restore block */
4056 	if (rdev->rlc.save_restore_obj) {
4057 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4058 		if (unlikely(r != 0))
4059 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4060 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4061 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4062 
4063 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4064 		rdev->rlc.save_restore_obj = NULL;
4065 	}
4066 
4067 	/* clear state block */
4068 	if (rdev->rlc.clear_state_obj) {
4069 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4070 		if (unlikely(r != 0))
4071 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4072 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4073 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4074 
4075 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4076 		rdev->rlc.clear_state_obj = NULL;
4077 	}
4078 
4079 	/* clear state block */
4080 	if (rdev->rlc.cp_table_obj) {
4081 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4082 		if (unlikely(r != 0))
4083 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4084 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4085 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4086 
4087 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4088 		rdev->rlc.cp_table_obj = NULL;
4089 	}
4090 }
4091 
4092 #define CP_ME_TABLE_SIZE    96
4093 
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills up to three VRAM buffer objects:
 * - the save/restore block (from rdev->rlc.reg_list),
 * - the clear state block (from rdev->rlc.cs_data),
 * - the CP power-gating table (when rdev->rlc.cp_table_size is set).
 * Despite the sumo_ prefix this is shared with SI (TAHITI+) and CIK
 * (BONAIRE+) parts, hence the family checks selecting the buffer
 * layout.  Returns 0 on success or a negative error code; any
 * partially created objects are cleaned up via sumo_rlc_fini().
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the sr buffer — NOTE(review):
		 * the breakdown of these constants is not visible here;
		 * confirm against the CIK RLC code */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			/* SI takes the register list verbatim */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* register offsets are packed two per dword
			 * (word-addressed, hence the >> 2), each pair
			 * followed by two save-space slots — that is
			 * why the destination index is (i * 3) / 2 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		/* work out the buffer size in dwords for this family */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI layout: 3 header dwords per
			 * extent plus a 2-dword trailer, followed by
			 * the register value block */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI: 256-byte header holding the GPU address
			 * and size of the csb that follows it */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: emit one 3-dword header (value
			 * block GPU address, register offset, length)
			 * per extent, with the register values packed
			 * after the headers at reg_list_blk_index */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* NOTE(review): bit 0x08000000 looks
					 * like a flag in the length dword —
					 * meaning not visible here; confirm */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	/* CP power-gating table (CIK parts) */
	if (rdev->rlc.cp_table_size) {
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4310 
4311 static void evergreen_rlc_start(struct radeon_device *rdev)
4312 {
4313 	u32 mask = RLC_ENABLE;
4314 
4315 	if (rdev->flags & RADEON_IS_IGP) {
4316 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4317 	}
4318 
4319 	WREG32(RLC_CNTL, mask);
4320 }
4321 
4322 int evergreen_rlc_resume(struct radeon_device *rdev)
4323 {
4324 	u32 i;
4325 	const __be32 *fw_data;
4326 
4327 	if (!rdev->rlc_fw)
4328 		return -EINVAL;
4329 
4330 	r600_rlc_stop(rdev);
4331 
4332 	WREG32(RLC_HB_CNTL, 0);
4333 
4334 	if (rdev->flags & RADEON_IS_IGP) {
4335 		if (rdev->family == CHIP_ARUBA) {
4336 			u32 always_on_bitmap =
4337 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4338 			/* find out the number of active simds */
4339 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4340 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4341 			tmp = hweight32(~tmp);
4342 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4343 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4344 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4345 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4346 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4347 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4348 			}
4349 		} else {
4350 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4351 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4352 		}
4353 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4354 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4355 	} else {
4356 		WREG32(RLC_HB_BASE, 0);
4357 		WREG32(RLC_HB_RPTR, 0);
4358 		WREG32(RLC_HB_WPTR, 0);
4359 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4360 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4361 	}
4362 	WREG32(RLC_MC_CNTL, 0);
4363 	WREG32(RLC_UCODE_CNTL, 0);
4364 
4365 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4366 	if (rdev->family >= CHIP_ARUBA) {
4367 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4368 			WREG32(RLC_UCODE_ADDR, i);
4369 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4370 		}
4371 	} else if (rdev->family >= CHIP_CAYMAN) {
4372 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4373 			WREG32(RLC_UCODE_ADDR, i);
4374 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4375 		}
4376 	} else {
4377 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4378 			WREG32(RLC_UCODE_ADDR, i);
4379 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4380 		}
4381 	}
4382 	WREG32(RLC_UCODE_ADDR, 0);
4383 
4384 	evergreen_rlc_start(rdev);
4385 
4386 	return 0;
4387 }
4388 
4389 /* Interrupts */
4390 
4391 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4392 {
4393 	if (crtc >= rdev->num_crtc)
4394 		return 0;
4395 	else
4396 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4397 }
4398 
/**
 * evergreen_disable_interrupt_state - mask all interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Forces every interrupt source the driver programs (CP rings, DMA,
 * GRBM/SRBM, CRTC vblank, pageflip, DAC autodetect and HPD) into the
 * disabled state.  Used to reach a known-quiet baseline before the IH
 * ring is (re)configured or torn down.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	/* cayman+ has three CP rings and a second DMA engine to quiesce */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC interrupt masks; only touch CRTCs the asic has */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip (graphics surface) interrupts */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear the HPD interrupt enables while preserving the
	 * configured polarity bit */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4457 
4458 int evergreen_irq_set(struct radeon_device *rdev)
4459 {
4460 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4461 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4462 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4463 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4464 	u32 grbm_int_cntl = 0;
4465 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4466 	u32 dma_cntl, dma_cntl1 = 0;
4467 	u32 thermal_int = 0;
4468 
4469 	if (!rdev->irq.installed) {
4470 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4471 		return -EINVAL;
4472 	}
4473 	/* don't enable anything if the ih is disabled */
4474 	if (!rdev->ih.enabled) {
4475 		r600_disable_interrupts(rdev);
4476 		/* force the active interrupt state to all disabled */
4477 		evergreen_disable_interrupt_state(rdev);
4478 		return 0;
4479 	}
4480 
4481 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4482 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4483 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4484 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4485 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4486 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4487 	if (rdev->family == CHIP_ARUBA)
4488 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4489 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4490 	else
4491 		thermal_int = RREG32(CG_THERMAL_INT) &
4492 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4493 
4494 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4495 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4496 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4497 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4498 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4499 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4500 
4501 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4502 
4503 	if (rdev->family >= CHIP_CAYMAN) {
4504 		/* enable CP interrupts on all rings */
4505 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4506 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4507 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4508 		}
4509 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4510 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4511 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4512 		}
4513 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4514 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4515 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4516 		}
4517 	} else {
4518 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4519 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4520 			cp_int_cntl |= RB_INT_ENABLE;
4521 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4522 		}
4523 	}
4524 
4525 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4526 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4527 		dma_cntl |= TRAP_ENABLE;
4528 	}
4529 
4530 	if (rdev->family >= CHIP_CAYMAN) {
4531 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4532 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4533 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4534 			dma_cntl1 |= TRAP_ENABLE;
4535 		}
4536 	}
4537 
4538 	if (rdev->irq.dpm_thermal) {
4539 		DRM_DEBUG("dpm thermal\n");
4540 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4541 	}
4542 
4543 	if (rdev->irq.crtc_vblank_int[0] ||
4544 	    atomic_read(&rdev->irq.pflip[0])) {
4545 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4546 		crtc1 |= VBLANK_INT_MASK;
4547 	}
4548 	if (rdev->irq.crtc_vblank_int[1] ||
4549 	    atomic_read(&rdev->irq.pflip[1])) {
4550 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4551 		crtc2 |= VBLANK_INT_MASK;
4552 	}
4553 	if (rdev->irq.crtc_vblank_int[2] ||
4554 	    atomic_read(&rdev->irq.pflip[2])) {
4555 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4556 		crtc3 |= VBLANK_INT_MASK;
4557 	}
4558 	if (rdev->irq.crtc_vblank_int[3] ||
4559 	    atomic_read(&rdev->irq.pflip[3])) {
4560 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4561 		crtc4 |= VBLANK_INT_MASK;
4562 	}
4563 	if (rdev->irq.crtc_vblank_int[4] ||
4564 	    atomic_read(&rdev->irq.pflip[4])) {
4565 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4566 		crtc5 |= VBLANK_INT_MASK;
4567 	}
4568 	if (rdev->irq.crtc_vblank_int[5] ||
4569 	    atomic_read(&rdev->irq.pflip[5])) {
4570 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4571 		crtc6 |= VBLANK_INT_MASK;
4572 	}
4573 	if (rdev->irq.hpd[0]) {
4574 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4575 		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4576 	}
4577 	if (rdev->irq.hpd[1]) {
4578 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4579 		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4580 	}
4581 	if (rdev->irq.hpd[2]) {
4582 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4583 		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4584 	}
4585 	if (rdev->irq.hpd[3]) {
4586 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4587 		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4588 	}
4589 	if (rdev->irq.hpd[4]) {
4590 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4591 		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4592 	}
4593 	if (rdev->irq.hpd[5]) {
4594 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4595 		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4596 	}
4597 	if (rdev->irq.afmt[0]) {
4598 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4599 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4600 	}
4601 	if (rdev->irq.afmt[1]) {
4602 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4603 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4604 	}
4605 	if (rdev->irq.afmt[2]) {
4606 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4607 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4608 	}
4609 	if (rdev->irq.afmt[3]) {
4610 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4611 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4612 	}
4613 	if (rdev->irq.afmt[4]) {
4614 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4615 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4616 	}
4617 	if (rdev->irq.afmt[5]) {
4618 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4619 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4620 	}
4621 
4622 	if (rdev->family >= CHIP_CAYMAN) {
4623 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4624 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4625 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4626 	} else
4627 		WREG32(CP_INT_CNTL, cp_int_cntl);
4628 
4629 	WREG32(DMA_CNTL, dma_cntl);
4630 
4631 	if (rdev->family >= CHIP_CAYMAN)
4632 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4633 
4634 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4635 
4636 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4637 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4638 	if (rdev->num_crtc >= 4) {
4639 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4640 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4641 	}
4642 	if (rdev->num_crtc >= 6) {
4643 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4644 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4645 	}
4646 
4647 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4648 	       GRPH_PFLIP_INT_MASK);
4649 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4650 	       GRPH_PFLIP_INT_MASK);
4651 	if (rdev->num_crtc >= 4) {
4652 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4653 		       GRPH_PFLIP_INT_MASK);
4654 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4655 		       GRPH_PFLIP_INT_MASK);
4656 	}
4657 	if (rdev->num_crtc >= 6) {
4658 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4659 		       GRPH_PFLIP_INT_MASK);
4660 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4661 		       GRPH_PFLIP_INT_MASK);
4662 	}
4663 
4664 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4665 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4666 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4667 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4668 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4669 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4670 	if (rdev->family == CHIP_ARUBA)
4671 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4672 	else
4673 		WREG32(CG_THERMAL_INT, thermal_int);
4674 
4675 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4676 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4677 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4678 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4679 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4680 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4681 
4682 	/* posting read */
4683 	RREG32(SRBM_STATUS);
4684 
4685 	return 0;
4686 }
4687 
4688 static void evergreen_irq_ack(struct radeon_device *rdev)
4689 {
4690 	u32 tmp;
4691 
4692 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4693 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4694 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4695 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4696 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4697 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4698 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4699 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4700 	if (rdev->num_crtc >= 4) {
4701 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4702 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4703 	}
4704 	if (rdev->num_crtc >= 6) {
4705 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4706 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4707 	}
4708 
4709 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4710 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4711 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4712 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4713 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4714 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4715 
4716 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4717 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4718 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4719 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4720 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4721 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4722 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4723 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4724 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4725 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4726 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4727 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4728 
4729 	if (rdev->num_crtc >= 4) {
4730 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4731 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4732 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4733 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4734 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4735 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4736 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4737 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4738 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4739 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4740 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4741 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4742 	}
4743 
4744 	if (rdev->num_crtc >= 6) {
4745 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4746 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4747 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4748 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4749 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4750 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4751 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4752 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4753 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4754 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4755 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4756 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4757 	}
4758 
4759 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4760 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4761 		tmp |= DC_HPDx_INT_ACK;
4762 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4763 	}
4764 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4765 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4766 		tmp |= DC_HPDx_INT_ACK;
4767 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4768 	}
4769 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4770 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4771 		tmp |= DC_HPDx_INT_ACK;
4772 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4773 	}
4774 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4775 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4776 		tmp |= DC_HPDx_INT_ACK;
4777 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4778 	}
4779 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4780 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4781 		tmp |= DC_HPDx_INT_ACK;
4782 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4783 	}
4784 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4785 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4786 		tmp |= DC_HPDx_INT_ACK;
4787 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4788 	}
4789 
4790 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4791 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4792 		tmp |= DC_HPDx_RX_INT_ACK;
4793 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4794 	}
4795 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4796 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4797 		tmp |= DC_HPDx_RX_INT_ACK;
4798 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4799 	}
4800 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4801 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4802 		tmp |= DC_HPDx_RX_INT_ACK;
4803 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4804 	}
4805 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4806 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4807 		tmp |= DC_HPDx_RX_INT_ACK;
4808 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4809 	}
4810 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4811 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4812 		tmp |= DC_HPDx_RX_INT_ACK;
4813 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4814 	}
4815 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4816 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4817 		tmp |= DC_HPDx_RX_INT_ACK;
4818 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4819 	}
4820 
4821 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4822 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4823 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4824 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4825 	}
4826 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4827 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4828 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4829 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4830 	}
4831 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4832 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4833 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4834 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4835 	}
4836 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4837 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4838 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4839 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4840 	}
4841 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4842 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4843 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4844 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4845 	}
4846 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4847 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4848 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4849 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4850 	}
4851 }
4852 
/*
 * Fully quiesce interrupts: mask them at the controller, let any
 * in-flight sources settle, acknowledge what fired, then reset the
 * per-source enable state.  The call order is deliberate.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4861 
/*
 * Prepare interrupt hardware for suspend: disable/ack all interrupt
 * sources first, then stop the RLC so nothing re-asserts afterwards.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4867 
4868 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4869 {
4870 	u32 wptr, tmp;
4871 
4872 	if (rdev->wb.enabled)
4873 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4874 	else
4875 		wptr = RREG32(IH_RB_WPTR);
4876 
4877 	if (wptr & RB_OVERFLOW) {
4878 		wptr &= ~RB_OVERFLOW;
4879 		/* When a ring buffer overflow happen start parsing interrupt
4880 		 * from the last not overwritten vector (wptr + 16). Hopefully
4881 		 * this should allow us to catchup.
4882 		 */
4883 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4884 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4885 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4886 		tmp = RREG32(IH_RB_CNTL);
4887 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4888 		WREG32(IH_RB_CNTL, tmp);
4889 	}
4890 	return (wptr & rdev->ih.ptr_mask);
4891 }
4892 
4893 int evergreen_irq_process(struct radeon_device *rdev)
4894 {
4895 	u32 wptr;
4896 	u32 rptr;
4897 	u32 src_id, src_data;
4898 	u32 ring_index;
4899 	bool queue_hotplug = false;
4900 	bool queue_hdmi = false;
4901 	bool queue_dp = false;
4902 	bool queue_thermal = false;
4903 	u32 status, addr;
4904 
4905 	if (!rdev->ih.enabled || rdev->shutdown)
4906 		return IRQ_NONE;
4907 
4908 	wptr = evergreen_get_ih_wptr(rdev);
4909 
4910 restart_ih:
4911 	/* is somebody else already processing irqs? */
4912 	if (atomic_xchg(&rdev->ih.lock, 1))
4913 		return IRQ_NONE;
4914 
4915 	rptr = rdev->ih.rptr;
4916 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4917 
4918 	/* Order reading of wptr vs. reading of IH ring data */
4919 	rmb();
4920 
4921 	/* display interrupts */
4922 	evergreen_irq_ack(rdev);
4923 
4924 	while (rptr != wptr) {
4925 		/* wptr/rptr are in bytes! */
4926 		ring_index = rptr / 4;
4927 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4928 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4929 
4930 		switch (src_id) {
4931 		case 1: /* D1 vblank/vline */
4932 			switch (src_data) {
4933 			case 0: /* D1 vblank */
4934 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
4935 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
4936 
4937 				if (rdev->irq.crtc_vblank_int[0]) {
4938 					drm_handle_vblank(rdev->ddev, 0);
4939 					rdev->pm.vblank_sync = true;
4940 					wake_up(&rdev->irq.vblank_queue);
4941 				}
4942 				if (atomic_read(&rdev->irq.pflip[0]))
4943 					radeon_crtc_handle_vblank(rdev, 0);
4944 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4945 				DRM_DEBUG("IH: D1 vblank\n");
4946 
4947 				break;
4948 			case 1: /* D1 vline */
4949 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
4950 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
4951 
4952 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4953 				DRM_DEBUG("IH: D1 vline\n");
4954 
4955 				break;
4956 			default:
4957 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4958 				break;
4959 			}
4960 			break;
4961 		case 2: /* D2 vblank/vline */
4962 			switch (src_data) {
4963 			case 0: /* D2 vblank */
4964 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
4965 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
4966 
4967 				if (rdev->irq.crtc_vblank_int[1]) {
4968 					drm_handle_vblank(rdev->ddev, 1);
4969 					rdev->pm.vblank_sync = true;
4970 					wake_up(&rdev->irq.vblank_queue);
4971 				}
4972 				if (atomic_read(&rdev->irq.pflip[1]))
4973 					radeon_crtc_handle_vblank(rdev, 1);
4974 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4975 				DRM_DEBUG("IH: D2 vblank\n");
4976 
4977 				break;
4978 			case 1: /* D2 vline */
4979 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
4980 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
4981 
4982 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4983 				DRM_DEBUG("IH: D2 vline\n");
4984 
4985 				break;
4986 			default:
4987 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4988 				break;
4989 			}
4990 			break;
4991 		case 3: /* D3 vblank/vline */
4992 			switch (src_data) {
4993 			case 0: /* D3 vblank */
4994 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
4995 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
4996 
4997 				if (rdev->irq.crtc_vblank_int[2]) {
4998 					drm_handle_vblank(rdev->ddev, 2);
4999 					rdev->pm.vblank_sync = true;
5000 					wake_up(&rdev->irq.vblank_queue);
5001 				}
5002 				if (atomic_read(&rdev->irq.pflip[2]))
5003 					radeon_crtc_handle_vblank(rdev, 2);
5004 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5005 				DRM_DEBUG("IH: D3 vblank\n");
5006 
5007 				break;
5008 			case 1: /* D3 vline */
5009 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5010 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5011 
5012 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5013 				DRM_DEBUG("IH: D3 vline\n");
5014 
5015 				break;
5016 			default:
5017 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5018 				break;
5019 			}
5020 			break;
5021 		case 4: /* D4 vblank/vline */
5022 			switch (src_data) {
5023 			case 0: /* D4 vblank */
5024 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5025 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5026 
5027 				if (rdev->irq.crtc_vblank_int[3]) {
5028 					drm_handle_vblank(rdev->ddev, 3);
5029 					rdev->pm.vblank_sync = true;
5030 					wake_up(&rdev->irq.vblank_queue);
5031 				}
5032 				if (atomic_read(&rdev->irq.pflip[3]))
5033 					radeon_crtc_handle_vblank(rdev, 3);
5034 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5035 				DRM_DEBUG("IH: D4 vblank\n");
5036 
5037 				break;
5038 			case 1: /* D4 vline */
5039 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5040 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5041 
5042 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5043 				DRM_DEBUG("IH: D4 vline\n");
5044 
5045 				break;
5046 			default:
5047 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5048 				break;
5049 			}
5050 			break;
5051 		case 5: /* D5 vblank/vline */
5052 			switch (src_data) {
5053 			case 0: /* D5 vblank */
5054 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5055 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5056 
5057 				if (rdev->irq.crtc_vblank_int[4]) {
5058 					drm_handle_vblank(rdev->ddev, 4);
5059 					rdev->pm.vblank_sync = true;
5060 					wake_up(&rdev->irq.vblank_queue);
5061 				}
5062 				if (atomic_read(&rdev->irq.pflip[4]))
5063 					radeon_crtc_handle_vblank(rdev, 4);
5064 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5065 				DRM_DEBUG("IH: D5 vblank\n");
5066 
5067 				break;
5068 			case 1: /* D5 vline */
5069 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5070 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5071 
5072 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5073 				DRM_DEBUG("IH: D5 vline\n");
5074 
5075 				break;
5076 			default:
5077 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5078 				break;
5079 			}
5080 			break;
5081 		case 6: /* D6 vblank/vline */
5082 			switch (src_data) {
5083 			case 0: /* D6 vblank */
5084 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5085 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5086 
5087 				if (rdev->irq.crtc_vblank_int[5]) {
5088 					drm_handle_vblank(rdev->ddev, 5);
5089 					rdev->pm.vblank_sync = true;
5090 					wake_up(&rdev->irq.vblank_queue);
5091 				}
5092 				if (atomic_read(&rdev->irq.pflip[5]))
5093 					radeon_crtc_handle_vblank(rdev, 5);
5094 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5095 				DRM_DEBUG("IH: D6 vblank\n");
5096 
5097 				break;
5098 			case 1: /* D6 vline */
5099 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5100 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5101 
5102 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5103 				DRM_DEBUG("IH: D6 vline\n");
5104 
5105 				break;
5106 			default:
5107 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5108 				break;
5109 			}
5110 			break;
5111 		case 8: /* D1 page flip */
5112 		case 10: /* D2 page flip */
5113 		case 12: /* D3 page flip */
5114 		case 14: /* D4 page flip */
5115 		case 16: /* D5 page flip */
5116 		case 18: /* D6 page flip */
5117 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5118 			if (radeon_use_pflipirq > 0)
5119 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5120 			break;
5121 		case 42: /* HPD hotplug */
5122 			switch (src_data) {
5123 			case 0:
5124 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5125 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5126 
5127 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5128 				queue_hotplug = true;
5129 				DRM_DEBUG("IH: HPD1\n");
5130 				break;
5131 			case 1:
5132 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5133 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5134 
5135 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5136 				queue_hotplug = true;
5137 				DRM_DEBUG("IH: HPD2\n");
5138 				break;
5139 			case 2:
5140 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5141 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5142 
5143 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5144 				queue_hotplug = true;
5145 				DRM_DEBUG("IH: HPD3\n");
5146 				break;
5147 			case 3:
5148 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5149 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5150 
5151 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5152 				queue_hotplug = true;
5153 				DRM_DEBUG("IH: HPD4\n");
5154 				break;
5155 			case 4:
5156 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5157 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5158 
5159 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5160 				queue_hotplug = true;
5161 				DRM_DEBUG("IH: HPD5\n");
5162 				break;
5163 			case 5:
5164 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5165 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5166 
5167 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5168 				queue_hotplug = true;
5169 				DRM_DEBUG("IH: HPD6\n");
5170 				break;
5171 			case 6:
5172 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5173 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5174 
5175 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5176 				queue_dp = true;
5177 				DRM_DEBUG("IH: HPD_RX 1\n");
5178 				break;
5179 			case 7:
5180 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5181 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5182 
5183 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5184 				queue_dp = true;
5185 				DRM_DEBUG("IH: HPD_RX 2\n");
5186 				break;
5187 			case 8:
5188 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5189 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5190 
5191 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5192 				queue_dp = true;
5193 				DRM_DEBUG("IH: HPD_RX 3\n");
5194 				break;
5195 			case 9:
5196 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5197 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5198 
5199 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5200 				queue_dp = true;
5201 				DRM_DEBUG("IH: HPD_RX 4\n");
5202 				break;
5203 			case 10:
5204 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5205 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5206 
5207 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5208 				queue_dp = true;
5209 				DRM_DEBUG("IH: HPD_RX 5\n");
5210 				break;
5211 			case 11:
5212 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5213 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5214 
5215 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5216 				queue_dp = true;
5217 				DRM_DEBUG("IH: HPD_RX 6\n");
5218 				break;
5219 			default:
5220 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5221 				break;
5222 			}
5223 			break;
5224 		case 44: /* hdmi */
5225 			switch (src_data) {
5226 			case 0:
5227 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5228 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5229 
5230 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5231 				queue_hdmi = true;
5232 				DRM_DEBUG("IH: HDMI0\n");
5233 				break;
5234 			case 1:
5235 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5236 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5237 
5238 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5239 				queue_hdmi = true;
5240 				DRM_DEBUG("IH: HDMI1\n");
5241 				break;
5242 			case 2:
5243 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5244 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5245 
5246 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5247 				queue_hdmi = true;
5248 				DRM_DEBUG("IH: HDMI2\n");
5249 				break;
5250 			case 3:
5251 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5252 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5253 
5254 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5255 				queue_hdmi = true;
5256 				DRM_DEBUG("IH: HDMI3\n");
5257 				break;
5258 			case 4:
5259 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5260 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5261 
5262 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5263 				queue_hdmi = true;
5264 				DRM_DEBUG("IH: HDMI4\n");
5265 				break;
5266 			case 5:
5267 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5268 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5269 
5270 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5271 				queue_hdmi = true;
5272 				DRM_DEBUG("IH: HDMI5\n");
5273 				break;
5274 			default:
5275 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5276 				break;
5277 			}
5278 		case 96:
5279 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5280 			WREG32(SRBM_INT_ACK, 0x1);
5281 			break;
5282 		case 124: /* UVD */
5283 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5284 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5285 			break;
5286 		case 146:
5287 		case 147:
5288 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5289 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5290 			/* reset addr and status */
5291 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5292 			if (addr == 0x0 && status == 0x0)
5293 				break;
5294 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5295 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5296 				addr);
5297 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5298 				status);
5299 			cayman_vm_decode_fault(rdev, status, addr);
5300 			break;
5301 		case 176: /* CP_INT in ring buffer */
5302 		case 177: /* CP_INT in IB1 */
5303 		case 178: /* CP_INT in IB2 */
5304 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5305 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5306 			break;
5307 		case 181: /* CP EOP event */
5308 			DRM_DEBUG("IH: CP EOP\n");
5309 			if (rdev->family >= CHIP_CAYMAN) {
5310 				switch (src_data) {
5311 				case 0:
5312 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5313 					break;
5314 				case 1:
5315 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5316 					break;
5317 				case 2:
5318 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5319 					break;
5320 				}
5321 			} else
5322 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5323 			break;
5324 		case 224: /* DMA trap event */
5325 			DRM_DEBUG("IH: DMA trap\n");
5326 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5327 			break;
5328 		case 230: /* thermal low to high */
5329 			DRM_DEBUG("IH: thermal low to high\n");
5330 			rdev->pm.dpm.thermal.high_to_low = false;
5331 			queue_thermal = true;
5332 			break;
5333 		case 231: /* thermal high to low */
5334 			DRM_DEBUG("IH: thermal high to low\n");
5335 			rdev->pm.dpm.thermal.high_to_low = true;
5336 			queue_thermal = true;
5337 			break;
5338 		case 233: /* GUI IDLE */
5339 			DRM_DEBUG("IH: GUI idle\n");
5340 			break;
5341 		case 244: /* DMA trap event */
5342 			if (rdev->family >= CHIP_CAYMAN) {
5343 				DRM_DEBUG("IH: DMA1 trap\n");
5344 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5345 			}
5346 			break;
5347 		default:
5348 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5349 			break;
5350 		}
5351 
5352 		/* wptr/rptr are in bytes! */
5353 		rptr += 16;
5354 		rptr &= rdev->ih.ptr_mask;
5355 		WREG32(IH_RB_RPTR, rptr);
5356 	}
5357 	if (queue_dp)
5358 		schedule_work(&rdev->dp_work);
5359 	if (queue_hotplug)
5360 		schedule_delayed_work(&rdev->hotplug_work, 0);
5361 	if (queue_hdmi)
5362 		schedule_work(&rdev->audio_work);
5363 	if (queue_thermal && rdev->pm.dpm_enabled)
5364 		schedule_work(&rdev->pm.dpm.thermal.work);
5365 	rdev->ih.rptr = rptr;
5366 	atomic_set(&rdev->ih.lock, 0);
5367 
5368 	/* make sure wptr hasn't changed while processing */
5369 	wptr = evergreen_get_ih_wptr(rdev);
5370 	if (wptr != rptr)
5371 		goto restart_ih;
5372 
5373 	return IRQ_HANDLED;
5374 }
5375 
5376 static void evergreen_uvd_init(struct radeon_device *rdev)
5377 {
5378 	int r;
5379 
5380 	if (!rdev->has_uvd)
5381 		return;
5382 
5383 	r = radeon_uvd_init(rdev);
5384 	if (r) {
5385 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5386 		/*
5387 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5388 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5389 		 * there. So it is pointless to try to go through that code
5390 		 * hence why we disable uvd here.
5391 		 */
5392 		rdev->has_uvd = 0;
5393 		return;
5394 	}
5395 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5396 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5397 }
5398 
5399 static void evergreen_uvd_start(struct radeon_device *rdev)
5400 {
5401 	int r;
5402 
5403 	if (!rdev->has_uvd)
5404 		return;
5405 
5406 	r = uvd_v2_2_resume(rdev);
5407 	if (r) {
5408 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5409 		goto error;
5410 	}
5411 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5412 	if (r) {
5413 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5414 		goto error;
5415 	}
5416 	return;
5417 
5418 error:
5419 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5420 }
5421 
5422 static void evergreen_uvd_resume(struct radeon_device *rdev)
5423 {
5424 	struct radeon_ring *ring;
5425 	int r;
5426 
5427 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5428 		return;
5429 
5430 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5431 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, RADEON_CP_PACKET2);
5432 	if (r) {
5433 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5434 		return;
5435 	}
5436 	r = uvd_v1_0_init(rdev);
5437 	if (r) {
5438 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5439 		return;
5440 	}
5441 }
5442 
/**
 * evergreen_startup - program the asic and bring the rings up
 *
 * @rdev: radeon_device pointer
 *
 * Common hardware bring-up used by both evergreen_init() and
 * evergreen_resume(): PCIE link/ASPM setup, MC programming, GART or
 * AGP enable, GPU init, RLC/writeback buffers, fence rings, IRQs,
 * CP/DMA/UVD ring init, the IB pool and audio.  The sequence is
 * order-dependent; do not reorder the steps.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* load MC microcode on DCE5 parts only when dpm is disabled
	 * (presumably the dpm path loads it itself — verify)
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* bring up GPU address translation: AGP aperture or PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	/* start the fence rings for the gfx and dma engines */
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD resume + fence start; on failure it only disables its ring */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* gfx ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	/* async dma ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5562 
5563 int evergreen_resume(struct radeon_device *rdev)
5564 {
5565 	int r;
5566 
5567 	/* reset the asic, the gfx blocks are often in a bad state
5568 	 * after the driver is unloaded or after a resume
5569 	 */
5570 	if (radeon_asic_reset(rdev))
5571 		dev_warn(rdev->dev, "GPU reset failed !\n");
5572 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5573 	 * posting will perform necessary task to bring back GPU into good
5574 	 * shape.
5575 	 */
5576 	/* post card */
5577 	atom_asic_init(rdev->mode_info.atom_context);
5578 
5579 	/* init golden registers */
5580 	evergreen_init_golden_registers(rdev);
5581 
5582 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5583 		radeon_pm_resume(rdev);
5584 
5585 	rdev->accel_working = true;
5586 	r = evergreen_startup(rdev);
5587 	if (r) {
5588 		DRM_ERROR("evergreen startup failed on resume\n");
5589 		rdev->accel_working = false;
5590 		return r;
5591 	}
5592 
5593 	return r;
5594 
5595 }
5596 
/**
 * evergreen_suspend - asic suspend callback
 *
 * @rdev: radeon_device pointer
 *
 * Quiesce the asic: suspend power management and audio, shut down
 * UVD if present, halt the CP and DMA engines, suspend interrupts
 * and disable writeback and the PCIE GART.  The order mirrors the
 * startup sequence in reverse; do not reorder.
 *
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5613 
/* The plan is to move initialization into this function and to use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic-specific functions. This should
 * also allow the removal of a bunch of callback functions,
 * like vram_info.
 */
/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * One-time driver init: read and post the vbios, reset the asic, set
 * up scratch/surface registers, clocks, the fence driver, AGP/MC, the
 * memory manager, microcode, power management and the ring
 * bookkeeping, then run evergreen_startup() to program the hardware.
 * If startup fails, acceleration state is torn down and
 * accel_working cleared, but init still returns 0 so the driver can
 * continue to load without acceleration.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; on failure just disable AGP and continue */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch microcode if not already loaded; DCE5 (NI) parts also
	 * need the MC firmware
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* set up ring bookkeeping for the gfx and dma engines */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear down the accel state and continue
		 * with acceleration disabled
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5743 
/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tear down everything set up by evergreen_init()/evergreen_startup()
 * and free the cached vbios.  The ordering mirrors init and should
 * not be changed.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* rlc buffers are only allocated on IGP parts */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	/* clear the pointer to guard against use after free */
	rdev->bios = NULL;
}
5768 
/**
 * evergreen_pcie_gen2_enable - try to switch the PCIE link to gen2 speed
 *
 * @rdev: radeon_device pointer
 *
 * Enables PCIE gen 2 link speeds when the radeon.pcie_gen2 module
 * parameter allows it, the part is a discrete PCIE asic (not IGP,
 * not an X2 board) and the bus supports 5.0 or 8.0 GT/s.  The
 * register write sequence is order-dependent; do not reorder.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	/* disabled via the radeon.pcie_gen2=0 module parameter */
	if (radeon_pcie_gen2 == 0)
		return;

	/* IGPs have no discrete PCIE link to retrain */
	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* only worthwhile if the bus itself can exceed gen1 speed */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* link partner is gen2 capable: allow width upconfigure,
		 * clear the failed-speed-change counter (set then clear
		 * the clear bit) and arm the gen2 enable strap
		 */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5831 
/**
 * evergreen_program_aspm - program PCIE Active State Power Management
 *
 * @rdev: radeon_device pointer
 *
 * Programs the PIF/PHY pairing, PLL power-down and link-control
 * registers for ASPM (L0s/L1) based on chip family.  A no-op when
 * the radeon.aspm=0 module parameter is set or the part is not PCIE.
 * The register sequence is order-dependent; do not reorder.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* set PIF pairing according to platform type; only write back
	 * registers whose value actually changed
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	/* L0s inactivity timeout; BARTS+ (NI) use a different value */
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timeout */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			/* allow the PLLs to power down while in L1 */
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally program the PLL ramp-up time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also set the LS2 exit time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5981