xref: /linux/drivers/gpu/drm/amd/amdgpu/si.c (revision 163b099146b85d1b05bd2eaa045acbeee25c29e4)
1  /*
2   * Copyright 2015 Advanced Micro Devices, Inc.
3   *
4   * Permission is hereby granted, free of charge, to any person obtaining a
5   * copy of this software and associated documentation files (the "Software"),
6   * to deal in the Software without restriction, including without limitation
7   * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8   * and/or sell copies of the Software, and to permit persons to whom the
9   * Software is furnished to do so, subject to the following conditions:
10   *
11   * The above copyright notice and this permission notice shall be included in
12   * all copies or substantial portions of the Software.
13   *
14   * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15   * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16   * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17   * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18   * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19   * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20   * OTHER DEALINGS IN THE SOFTWARE.
21   *
22   */
23  
24  #include <linux/firmware.h>
25  #include <linux/slab.h>
26  #include <linux/module.h>
27  #include <linux/pci.h>
28  
29  #include "amdgpu.h"
30  #include "amdgpu_atombios.h"
31  #include "amdgpu_ih.h"
32  #include "amdgpu_uvd.h"
33  #include "amdgpu_vce.h"
34  #include "atom.h"
35  #include "amd_pcie.h"
36  #include "si_dpm.h"
37  #include "sid.h"
38  #include "si_ih.h"
39  #include "gfx_v6_0.h"
40  #include "gmc_v6_0.h"
41  #include "si_dma.h"
42  #include "dce_v6_0.h"
43  #include "si.h"
44  #include "uvd_v3_1.h"
45  #include "dce_virtual.h"
46  #include "gca/gfx_6_0_d.h"
47  #include "oss/oss_1_0_d.h"
48  #include "oss/oss_1_0_sh_mask.h"
49  #include "gmc/gmc_6_0_d.h"
50  #include "dce/dce_6_0_d.h"
51  #include "uvd/uvd_4_0_d.h"
52  #include "bif/bif_3_0_d.h"
53  #include "bif/bif_3_0_sh_mask.h"
54  
55  #include "amdgpu_dm.h"
56  
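/*
 * Golden register tables: each entry below is a { register offset, and_mask,
 * value } triple.  Bare hex offsets (e.g. 0x340c) are registers that have no
 * symbolic mm* define in the headers included here.  The tables are applied
 * during golden-register init; a minimal sketch of how the common helper
 * (amdgpu_device_program_register_sequence()) consumes one triple, assuming
 * the usual and_mask semantics:
 *
 *	if (and_mask == 0xffffffff) {
 *		tmp = value;
 *	} else {
 *		tmp = RREG32(reg);
 *		tmp &= ~and_mask;
 *		tmp |= (value & and_mask);
 *	}
 *	WREG32(reg, tmp);
 */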
57  static const u32 tahiti_golden_registers[] =
58  {
59  	mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011,
60  	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
61  	mmDB_DEBUG, 0xffffffff, 0x00000000,
62  	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
63  	mmDB_DEBUG3, 0x0002021c, 0x00020200,
64  	mmDCI_CLK_CNTL, 0x00000080, 0x00000000,
65  	0x340c, 0x000000c0, 0x00800040,
66  	0x360c, 0x000000c0, 0x00800040,
67  	mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070,
68  	mmFBC_MISC, 0x00200000, 0x50100000,
69  	mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011,
70  	mmMC_ARB_WTM_CNTL_RD, 0x00000003, 0x000007ff,
71  	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
72  	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
73  	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
74  	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
75  	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
76  	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x2a00126a,
77  	0x000c, 0xffffffff, 0x0040,
78  	0x000d, 0x00000040, 0x00004040,
79  	mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000,
80  	mmSQ_DED_CNT, 0x01ff1f3f, 0x00000000,
81  	mmSQ_SEC_CNT, 0x01ff1f3f, 0x00000000,
82  	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
83  	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
84  	mmTCP_ADDR_CONFIG, 0x00000200, 0x000002fb,
85  	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x0000543b,
86  	mmTCP_CHAN_STEER_LO, 0xffffffff, 0xa9210876,
87  	mmVGT_FIFO_DEPTHS, 0xffffffff, 0x000fff40,
88  	mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010,
89  	mmVM_CONTEXT0_CNTL, 0x20000000, 0x20fffed8,
90  	mmVM_L2_CG, 0x000c0fc0, 0x000c0400,
91  	mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff,
92  	mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff,
93  	mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff,
94  	mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff,
95  };
96  
97  static const u32 tahiti_golden_registers2[] =
98  {
99  	mmMCIF_MEM_CONTROL, 0x00000001, 0x00000001,
100  };
101  
102  static const u32 tahiti_golden_rlc_registers[] =
103  {
104  	mmGB_ADDR_CONFIG, 0xffffffff, 0x12011003,
105  	mmRLC_LB_PARAMS, 0xffffffff, 0x00601005,
106  	0x311f, 0xffffffff, 0x10104040,
107  	0x3122, 0xffffffff, 0x0100000a,
108  	mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800,
109  	mmRLC_LB_CNTL, 0xffffffff, 0x800000f4,
110  	mmUVD_CGC_GATE, 0x00000008, 0x00000000,
111  };
112  
113  static const u32 pitcairn_golden_registers[] =
114  {
115  	mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011,
116  	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
117  	mmDB_DEBUG, 0xffffffff, 0x00000000,
118  	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
119  	mmDB_DEBUG3, 0x0002021c, 0x00020200,
120  	mmDCI_CLK_CNTL, 0x00000080, 0x00000000,
121  	0x340c, 0x000300c0, 0x00800040,
122  	0x360c, 0x000300c0, 0x00800040,
123  	mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070,
124  	mmFBC_MISC, 0x00200000, 0x50100000,
125  	mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011,
126  	mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2,
127  	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
128  	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
129  	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
130  	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
131  	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
132  	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x2a00126a,
133  	0x000c, 0xffffffff, 0x0040,
134  	0x000d, 0x00000040, 0x00004040,
135  	mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000,
136  	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
137  	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
138  	mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f7,
139  	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000,
140  	mmTCP_CHAN_STEER_LO, 0xffffffff, 0x32761054,
141  	mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010,
142  	mmVM_L2_CG, 0x000c0fc0, 0x000c0400,
143  	mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff,
144  	mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff,
145  	mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff,
146  	mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff,
147  };
148  
149  static const u32 pitcairn_golden_rlc_registers[] =
150  {
151  	mmGB_ADDR_CONFIG, 0xffffffff, 0x12011003,
152  	mmRLC_LB_PARAMS, 0xffffffff, 0x00601004,
153  	0x311f, 0xffffffff, 0x10102020,
154  	0x3122, 0xffffffff, 0x01000020,
155  	mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800,
156  	mmRLC_LB_CNTL, 0xffffffff, 0x800000a4,
157  };
158  
159  static const u32 verde_pg_init[] =
160  {
161  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x40000,
162  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x200010ff,
163  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
164  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
165  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
166  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
167  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
168  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x7007,
169  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x300010ff,
170  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
171  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
172  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
173  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
174  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
175  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x400000,
176  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x100010ff,
177  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
178  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
179  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
180  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
181  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
182  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x120200,
183  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x500010ff,
184  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
185  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
186  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
187  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
188  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
189  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x1e1e16,
190  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x600010ff,
191  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
192  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
193  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
194  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
195  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
196  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x171f1e,
197  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x700010ff,
198  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
199  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
200  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
201  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
202  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
203  	mmGMCON_PGFSM_WRITE, 0xffffffff, 0x0,
204  	mmGMCON_PGFSM_CONFIG, 0xffffffff, 0x9ff,
205  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x0,
206  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10000800,
207  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf,
208  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf,
209  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x4,
210  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1000051e,
211  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xffff,
212  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xffff,
213  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x8,
214  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x80500,
215  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x12,
216  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x9050c,
217  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1d,
218  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xb052c,
219  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x2a,
220  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1053e,
221  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x2d,
222  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10546,
223  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x30,
224  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xa054e,
225  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x3c,
226  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1055f,
227  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x3f,
228  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10567,
229  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x42,
230  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1056f,
231  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x45,
232  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x10572,
233  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x48,
234  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20575,
235  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x4c,
236  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x190801,
237  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x67,
238  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1082a,
239  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x6a,
240  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1b082d,
241  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x87,
242  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x310851,
243  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xba,
244  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x891,
245  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xbc,
246  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x893,
247  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xbe,
248  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20895,
249  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xc2,
250  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x20899,
251  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xc6,
252  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x2089d,
253  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xca,
254  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x8a1,
255  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xcc,
256  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x8a3,
257  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xce,
258  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x308a5,
259  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0xd3,
260  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x6d08cd,
261  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x142,
262  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x2000095a,
263  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x1,
264  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x144,
265  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x301f095b,
266  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x165,
267  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xc094d,
268  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x173,
269  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xf096d,
270  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x184,
271  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x15097f,
272  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x19b,
273  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xc0998,
274  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1a9,
275  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x409a7,
276  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1af,
277  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0xcdc,
278  	mmGMCON_RENG_RAM_INDEX, 0xffffffff, 0x1b1,
279  	mmGMCON_RENG_RAM_DATA, 0xffffffff, 0x800,
280  	mmGMCON_RENG_EXECUTE, 0xffffffff, 0x6c9b2000,
281  	mmGMCON_MISC2, 0xfc00, 0x2000,
282  	mmGMCON_MISC3, 0xffffffff, 0xfc0,
283  	mmMC_PMG_AUTO_CFG, 0x00000100, 0x100,
284  };
285  
286  static const u32 verde_golden_rlc_registers[] =
287  {
288  	mmGB_ADDR_CONFIG, 0xffffffff, 0x02010002,
289  	mmRLC_LB_PARAMS, 0xffffffff, 0x033f1005,
290  	0x311f, 0xffffffff, 0x10808020,
291  	0x3122, 0xffffffff, 0x00800008,
292  	mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00001000,
293  	mmRLC_LB_CNTL, 0xffffffff, 0x80010014,
294  };
295  
296  static const u32 verde_golden_registers[] =
297  {
298  	mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011,
299  	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
300  	mmDB_DEBUG, 0xffffffff, 0x00000000,
301  	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
302  	mmDB_DEBUG3, 0x0002021c, 0x00020200,
303  	mmDCI_CLK_CNTL, 0x00000080, 0x00000000,
304  	0x340c, 0x000300c0, 0x00800040,
305  	0x360c, 0x000300c0, 0x00800040,
306  	mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070,
307  	mmFBC_MISC, 0x00200000, 0x50100000,
308  	mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011,
309  	mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2,
310  	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
311  	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
312  	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
313  	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
314  	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
315  	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x0000124a,
316  	0x000c, 0xffffffff, 0x0040,
317  	0x000d, 0x00000040, 0x00004040,
318  	mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000,
319  	mmSQ_DED_CNT, 0x01ff1f3f, 0x00000000,
320  	mmSQ_SEC_CNT, 0x01ff1f3f, 0x00000000,
321  	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
322  	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
323  	mmTCP_ADDR_CONFIG, 0x000003ff, 0x00000003,
324  	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000,
325  	mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00001032,
326  	mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010,
327  	mmVM_L2_CG, 0x000c0fc0, 0x000c0400,
328  	mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff,
329  	mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff,
330  	mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff,
331  	mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff,
332  };
333  
334  static const u32 oland_golden_registers[] =
335  {
336  	mmAZALIA_SCLK_CONTROL, 0x00000030, 0x00000011,
337  	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
338  	mmDB_DEBUG, 0xffffffff, 0x00000000,
339  	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
340  	mmDB_DEBUG3, 0x0002021c, 0x00020200,
341  	mmDCI_CLK_CNTL, 0x00000080, 0x00000000,
342  	0x340c, 0x000300c0, 0x00800040,
343  	0x360c, 0x000300c0, 0x00800040,
344  	mmFBC_DEBUG_COMP, 0x000000f0, 0x00000070,
345  	mmFBC_MISC, 0x00200000, 0x50100000,
346  	mmDIG0_HDMI_CONTROL, 0x31000311, 0x00000011,
347  	mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2,
348  	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
349  	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
350  	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
351  	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
352  	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
353  	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x00000082,
354  	0x000c, 0xffffffff, 0x0040,
355  	0x000d, 0x00000040, 0x00004040,
356  	mmSPI_CONFIG_CNTL, 0x07ffffff, 0x03000000,
357  	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
358  	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
359  	mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f3,
360  	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000,
361  	mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00003210,
362  	mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010,
363  	mmVM_L2_CG, 0x000c0fc0, 0x000c0400,
364  	mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff,
365  	mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff,
366  	mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff,
367  	mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff,
369  };
370  
371  static const u32 oland_golden_rlc_registers[] =
372  {
373  	mmGB_ADDR_CONFIG, 0xffffffff, 0x02010002,
374  	mmRLC_LB_PARAMS, 0xffffffff, 0x00601005,
375  	0x311f, 0xffffffff, 0x10104040,
376  	0x3122, 0xffffffff, 0x0100000a,
377  	mmRLC_LB_CNTR_MAX, 0xffffffff, 0x00000800,
378  	mmRLC_LB_CNTL, 0xffffffff, 0x800000f4,
379  };
380  
381  static const u32 hainan_golden_registers[] =
382  {
383  	0x17bc, 0x00000030, 0x00000011,
384  	mmCB_HW_CONTROL, 0x00010000, 0x00018208,
385  	mmDB_DEBUG, 0xffffffff, 0x00000000,
386  	mmDB_DEBUG2, 0xf00fffff, 0x00000400,
387  	mmDB_DEBUG3, 0x0002021c, 0x00020200,
388  	0x031e, 0x00000080, 0x00000000,
389  	0x3430, 0xff000fff, 0x00000100,
390  	0x340c, 0x000300c0, 0x00800040,
391  	0x3630, 0xff000fff, 0x00000100,
392  	0x360c, 0x000300c0, 0x00800040,
393  	0x16ec, 0x000000f0, 0x00000070,
394  	0x16f0, 0x00200000, 0x50100000,
395  	0x1c0c, 0x31000311, 0x00000011,
396  	mmMC_SEQ_PMG_PG_HWCNTL, 0x00073ffe, 0x000022a2,
397  	mmMC_XPB_P2P_BAR_CFG, 0x000007ff, 0x00000000,
398  	mmPA_CL_ENHANCE, 0xf000001f, 0x00000007,
399  	mmPA_SC_FORCE_EOV_MAX_CNTS, 0xffffffff, 0x00ffffff,
400  	mmPA_SC_LINE_STIPPLE_STATE, 0x0000ff0f, 0x00000000,
401  	mmPA_SC_MODE_CNTL_1, 0x07ffffff, 0x4e000000,
402  	mmPA_SC_RASTER_CONFIG, 0x3f3f3fff, 0x00000000,
403  	0x000c, 0xffffffff, 0x0040,
404  	0x000d, 0x00000040, 0x00004040,
405  	mmSPI_CONFIG_CNTL, 0x03e00000, 0x03600000,
406  	mmSX_DEBUG_1, 0x0000007f, 0x00000020,
407  	mmTA_CNTL_AUX, 0x00010000, 0x00010000,
408  	mmTCP_ADDR_CONFIG, 0x000003ff, 0x000000f1,
409  	mmTCP_CHAN_STEER_HI, 0xffffffff, 0x00000000,
410  	mmTCP_CHAN_STEER_LO, 0xffffffff, 0x00003210,
411  	mmVGT_GS_VERTEX_REUSE, 0x0000001f, 0x00000010,
412  	mmVM_L2_CG, 0x000c0fc0, 0x000c0400,
413  	mmVM_PRT_APERTURE0_LOW_ADDR, 0x0fffffff, 0xffffffff,
414  	mmVM_PRT_APERTURE1_LOW_ADDR, 0x0fffffff, 0x0fffffff,
415  	mmVM_PRT_APERTURE2_LOW_ADDR, 0x0fffffff, 0x0fffffff,
416  	mmVM_PRT_APERTURE3_LOW_ADDR, 0x0fffffff, 0x0fffffff,
417  };
418  
419  static const u32 hainan_golden_registers2[] =
420  {
421  	mmGB_ADDR_CONFIG, 0xffffffff, 0x2011003,
422  };
423  
424  static const u32 tahiti_mgcg_cgcg_init[] =
425  {
426  	mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc,
427  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
428  	mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
429  	mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100,
430  	mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100,
431  	mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100,
432  	mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100,
433  	mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100,
434  	mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100,
435  	mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100,
436  	mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100,
437  	mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100,
438  	mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100,
439  	mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100,
440  	mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100,
441  	mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100,
442  	mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100,
443  	mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100,
444  	mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100,
445  	mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100,
446  	mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100,
447  	mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100,
448  	mmTA_CGTT_CTRL, 0xffffffff, 0x00000100,
449  	mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
450  	mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
451  	mmTD_CGTT_CTRL, 0xffffffff, 0x00000100,
452  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
453  	0x2458, 0xffffffff, 0x00010000,
454  	0x2459, 0xffffffff, 0x00030002,
455  	0x245a, 0xffffffff, 0x00040007,
456  	0x245b, 0xffffffff, 0x00060005,
457  	0x245c, 0xffffffff, 0x00090008,
458  	0x245d, 0xffffffff, 0x00020001,
459  	0x245e, 0xffffffff, 0x00040003,
460  	0x245f, 0xffffffff, 0x00000007,
461  	0x2460, 0xffffffff, 0x00060005,
462  	0x2461, 0xffffffff, 0x00090008,
463  	0x2462, 0xffffffff, 0x00030002,
464  	0x2463, 0xffffffff, 0x00050004,
465  	0x2464, 0xffffffff, 0x00000008,
466  	0x2465, 0xffffffff, 0x00070006,
467  	0x2466, 0xffffffff, 0x000a0009,
468  	0x2467, 0xffffffff, 0x00040003,
469  	0x2468, 0xffffffff, 0x00060005,
470  	0x2469, 0xffffffff, 0x00000009,
471  	0x246a, 0xffffffff, 0x00080007,
472  	0x246b, 0xffffffff, 0x000b000a,
473  	0x246c, 0xffffffff, 0x00050004,
474  	0x246d, 0xffffffff, 0x00070006,
475  	0x246e, 0xffffffff, 0x0008000b,
476  	0x246f, 0xffffffff, 0x000a0009,
477  	0x2470, 0xffffffff, 0x000d000c,
478  	0x2471, 0xffffffff, 0x00060005,
479  	0x2472, 0xffffffff, 0x00080007,
480  	0x2473, 0xffffffff, 0x0000000b,
481  	0x2474, 0xffffffff, 0x000a0009,
482  	0x2475, 0xffffffff, 0x000d000c,
483  	0x2476, 0xffffffff, 0x00070006,
484  	0x2477, 0xffffffff, 0x00090008,
485  	0x2478, 0xffffffff, 0x0000000c,
486  	0x2479, 0xffffffff, 0x000b000a,
487  	0x247a, 0xffffffff, 0x000e000d,
488  	0x247b, 0xffffffff, 0x00080007,
489  	0x247c, 0xffffffff, 0x000a0009,
490  	0x247d, 0xffffffff, 0x0000000d,
491  	0x247e, 0xffffffff, 0x000c000b,
492  	0x247f, 0xffffffff, 0x000f000e,
493  	0x2480, 0xffffffff, 0x00090008,
494  	0x2481, 0xffffffff, 0x000b000a,
495  	0x2482, 0xffffffff, 0x000c000f,
496  	0x2483, 0xffffffff, 0x000e000d,
497  	0x2484, 0xffffffff, 0x00110010,
498  	0x2485, 0xffffffff, 0x000a0009,
499  	0x2486, 0xffffffff, 0x000c000b,
500  	0x2487, 0xffffffff, 0x0000000f,
501  	0x2488, 0xffffffff, 0x000e000d,
502  	0x2489, 0xffffffff, 0x00110010,
503  	0x248a, 0xffffffff, 0x000b000a,
504  	0x248b, 0xffffffff, 0x000d000c,
505  	0x248c, 0xffffffff, 0x00000010,
506  	0x248d, 0xffffffff, 0x000f000e,
507  	0x248e, 0xffffffff, 0x00120011,
508  	0x248f, 0xffffffff, 0x000c000b,
509  	0x2490, 0xffffffff, 0x000e000d,
510  	0x2491, 0xffffffff, 0x00000011,
511  	0x2492, 0xffffffff, 0x0010000f,
512  	0x2493, 0xffffffff, 0x00130012,
513  	0x2494, 0xffffffff, 0x000d000c,
514  	0x2495, 0xffffffff, 0x000f000e,
515  	0x2496, 0xffffffff, 0x00100013,
516  	0x2497, 0xffffffff, 0x00120011,
517  	0x2498, 0xffffffff, 0x00150014,
518  	0x2499, 0xffffffff, 0x000e000d,
519  	0x249a, 0xffffffff, 0x0010000f,
520  	0x249b, 0xffffffff, 0x00000013,
521  	0x249c, 0xffffffff, 0x00120011,
522  	0x249d, 0xffffffff, 0x00150014,
523  	0x249e, 0xffffffff, 0x000f000e,
524  	0x249f, 0xffffffff, 0x00110010,
525  	0x24a0, 0xffffffff, 0x00000014,
526  	0x24a1, 0xffffffff, 0x00130012,
527  	0x24a2, 0xffffffff, 0x00160015,
528  	0x24a3, 0xffffffff, 0x0010000f,
529  	0x24a4, 0xffffffff, 0x00120011,
530  	0x24a5, 0xffffffff, 0x00000015,
531  	0x24a6, 0xffffffff, 0x00140013,
532  	0x24a7, 0xffffffff, 0x00170016,
533  	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
534  	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
535  	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
536  	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
537  	0x000c, 0xffffffff, 0x0000001c,
538  	0x000d, 0x000f0000, 0x000f0000,
539  	0x0583, 0xffffffff, 0x00000100,
540  	mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100,
541  	mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000,
542  	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
543  	mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000,
544  	mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000,
545  	mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100,
546  	0x157a, 0x00000001, 0x00000001,
547  	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
548  	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
549  	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
550  	0x3430, 0xfffffff0, 0x00000100,
551  	0x3630, 0xfffffff0, 0x00000100,
552  };

553  static const u32 pitcairn_mgcg_cgcg_init[] =
554  {
555  	mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc,
556  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
557  	mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
558  	mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100,
559  	mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100,
560  	mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100,
561  	mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100,
562  	mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100,
563  	mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100,
564  	mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100,
565  	mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100,
566  	mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100,
567  	mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100,
568  	mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100,
569  	mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100,
570  	mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100,
571  	mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100,
572  	mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100,
573  	mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100,
574  	mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100,
575  	mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100,
576  	mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100,
577  	mmTA_CGTT_CTRL, 0xffffffff, 0x00000100,
578  	mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
579  	mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
580  	mmTD_CGTT_CTRL, 0xffffffff, 0x00000100,
581  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
582  	0x2458, 0xffffffff, 0x00010000,
583  	0x2459, 0xffffffff, 0x00030002,
584  	0x245a, 0xffffffff, 0x00040007,
585  	0x245b, 0xffffffff, 0x00060005,
586  	0x245c, 0xffffffff, 0x00090008,
587  	0x245d, 0xffffffff, 0x00020001,
588  	0x245e, 0xffffffff, 0x00040003,
589  	0x245f, 0xffffffff, 0x00000007,
590  	0x2460, 0xffffffff, 0x00060005,
591  	0x2461, 0xffffffff, 0x00090008,
592  	0x2462, 0xffffffff, 0x00030002,
593  	0x2463, 0xffffffff, 0x00050004,
594  	0x2464, 0xffffffff, 0x00000008,
595  	0x2465, 0xffffffff, 0x00070006,
596  	0x2466, 0xffffffff, 0x000a0009,
597  	0x2467, 0xffffffff, 0x00040003,
598  	0x2468, 0xffffffff, 0x00060005,
599  	0x2469, 0xffffffff, 0x00000009,
600  	0x246a, 0xffffffff, 0x00080007,
601  	0x246b, 0xffffffff, 0x000b000a,
602  	0x246c, 0xffffffff, 0x00050004,
603  	0x246d, 0xffffffff, 0x00070006,
604  	0x246e, 0xffffffff, 0x0008000b,
605  	0x246f, 0xffffffff, 0x000a0009,
606  	0x2470, 0xffffffff, 0x000d000c,
607  	0x2480, 0xffffffff, 0x00090008,
608  	0x2481, 0xffffffff, 0x000b000a,
609  	0x2482, 0xffffffff, 0x000c000f,
610  	0x2483, 0xffffffff, 0x000e000d,
611  	0x2484, 0xffffffff, 0x00110010,
612  	0x2485, 0xffffffff, 0x000a0009,
613  	0x2486, 0xffffffff, 0x000c000b,
614  	0x2487, 0xffffffff, 0x0000000f,
615  	0x2488, 0xffffffff, 0x000e000d,
616  	0x2489, 0xffffffff, 0x00110010,
617  	0x248a, 0xffffffff, 0x000b000a,
618  	0x248b, 0xffffffff, 0x000d000c,
619  	0x248c, 0xffffffff, 0x00000010,
620  	0x248d, 0xffffffff, 0x000f000e,
621  	0x248e, 0xffffffff, 0x00120011,
622  	0x248f, 0xffffffff, 0x000c000b,
623  	0x2490, 0xffffffff, 0x000e000d,
624  	0x2491, 0xffffffff, 0x00000011,
625  	0x2492, 0xffffffff, 0x0010000f,
626  	0x2493, 0xffffffff, 0x00130012,
627  	0x2494, 0xffffffff, 0x000d000c,
628  	0x2495, 0xffffffff, 0x000f000e,
629  	0x2496, 0xffffffff, 0x00100013,
630  	0x2497, 0xffffffff, 0x00120011,
631  	0x2498, 0xffffffff, 0x00150014,
632  	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
633  	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
634  	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
635  	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
636  	0x000c, 0xffffffff, 0x0000001c,
637  	0x000d, 0x000f0000, 0x000f0000,
638  	0x0583, 0xffffffff, 0x00000100,
639  	mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100,
640  	mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000,
641  	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
642  	mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100,
643  	0x157a, 0x00000001, 0x00000001,
644  	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
645  	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
646  	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
647  	0x3430, 0xfffffff0, 0x00000100,
648  	0x3630, 0xfffffff0, 0x00000100,
649  };
650  
651  static const u32 verde_mgcg_cgcg_init[] =
652  {
653  	mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc,
654  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
655  	mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
656  	mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100,
657  	mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100,
658  	mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100,
659  	mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100,
660  	mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100,
661  	mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100,
662  	mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100,
663  	mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100,
664  	mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100,
665  	mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100,
666  	mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100,
667  	mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100,
668  	mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100,
669  	mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100,
670  	mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100,
671  	mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100,
672  	mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100,
673  	mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100,
674  	mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100,
675  	mmTA_CGTT_CTRL, 0xffffffff, 0x00000100,
676  	mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
677  	mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
678  	mmTD_CGTT_CTRL, 0xffffffff, 0x00000100,
679  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
680  	0x2458, 0xffffffff, 0x00010000,
681  	0x2459, 0xffffffff, 0x00030002,
682  	0x245a, 0xffffffff, 0x00040007,
683  	0x245b, 0xffffffff, 0x00060005,
684  	0x245c, 0xffffffff, 0x00090008,
685  	0x245d, 0xffffffff, 0x00020001,
686  	0x245e, 0xffffffff, 0x00040003,
687  	0x245f, 0xffffffff, 0x00000007,
688  	0x2460, 0xffffffff, 0x00060005,
689  	0x2461, 0xffffffff, 0x00090008,
690  	0x2462, 0xffffffff, 0x00030002,
691  	0x2463, 0xffffffff, 0x00050004,
692  	0x2464, 0xffffffff, 0x00000008,
693  	0x2465, 0xffffffff, 0x00070006,
694  	0x2466, 0xffffffff, 0x000a0009,
695  	0x2467, 0xffffffff, 0x00040003,
696  	0x2468, 0xffffffff, 0x00060005,
697  	0x2469, 0xffffffff, 0x00000009,
698  	0x246a, 0xffffffff, 0x00080007,
699  	0x246b, 0xffffffff, 0x000b000a,
700  	0x246c, 0xffffffff, 0x00050004,
701  	0x246d, 0xffffffff, 0x00070006,
702  	0x246e, 0xffffffff, 0x0008000b,
703  	0x246f, 0xffffffff, 0x000a0009,
704  	0x2470, 0xffffffff, 0x000d000c,
705  	0x2480, 0xffffffff, 0x00090008,
706  	0x2481, 0xffffffff, 0x000b000a,
707  	0x2482, 0xffffffff, 0x000c000f,
708  	0x2483, 0xffffffff, 0x000e000d,
709  	0x2484, 0xffffffff, 0x00110010,
710  	0x2485, 0xffffffff, 0x000a0009,
711  	0x2486, 0xffffffff, 0x000c000b,
712  	0x2487, 0xffffffff, 0x0000000f,
713  	0x2488, 0xffffffff, 0x000e000d,
714  	0x2489, 0xffffffff, 0x00110010,
715  	0x248a, 0xffffffff, 0x000b000a,
716  	0x248b, 0xffffffff, 0x000d000c,
717  	0x248c, 0xffffffff, 0x00000010,
718  	0x248d, 0xffffffff, 0x000f000e,
719  	0x248e, 0xffffffff, 0x00120011,
720  	0x248f, 0xffffffff, 0x000c000b,
721  	0x2490, 0xffffffff, 0x000e000d,
722  	0x2491, 0xffffffff, 0x00000011,
723  	0x2492, 0xffffffff, 0x0010000f,
724  	0x2493, 0xffffffff, 0x00130012,
725  	0x2494, 0xffffffff, 0x000d000c,
726  	0x2495, 0xffffffff, 0x000f000e,
727  	0x2496, 0xffffffff, 0x00100013,
728  	0x2497, 0xffffffff, 0x00120011,
729  	0x2498, 0xffffffff, 0x00150014,
730  	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
731  	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
732  	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
733  	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
734  	0x000c, 0xffffffff, 0x0000001c,
735  	0x000d, 0x000f0000, 0x000f0000,
736  	0x0583, 0xffffffff, 0x00000100,
737  	mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100,
738  	mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000,
739  	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
740  	mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000,
741  	mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000,
742  	mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100,
743  	0x157a, 0x00000001, 0x00000001,
744  	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
745  	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
746  	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
747  	0x3430, 0xfffffff0, 0x00000100,
748  	0x3630, 0xfffffff0, 0x00000100,
749  };
750  
751  static const u32 oland_mgcg_cgcg_init[] =
752  {
753  	mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc,
754  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
755  	mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
756  	mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100,
757  	mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100,
758  	mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100,
759  	mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100,
760  	mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100,
761  	mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100,
762  	mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100,
763  	mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100,
764  	mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100,
765  	mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100,
766  	mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100,
767  	mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100,
768  	mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100,
769  	mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100,
770  	mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100,
771  	mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100,
772  	mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100,
773  	mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100,
774  	mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100,
775  	mmTA_CGTT_CTRL, 0xffffffff, 0x00000100,
776  	mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
777  	mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
778  	mmTD_CGTT_CTRL, 0xffffffff, 0x00000100,
779  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
780  	0x2458, 0xffffffff, 0x00010000,
781  	0x2459, 0xffffffff, 0x00030002,
782  	0x245a, 0xffffffff, 0x00040007,
783  	0x245b, 0xffffffff, 0x00060005,
784  	0x245c, 0xffffffff, 0x00090008,
785  	0x245d, 0xffffffff, 0x00020001,
786  	0x245e, 0xffffffff, 0x00040003,
787  	0x245f, 0xffffffff, 0x00000007,
788  	0x2460, 0xffffffff, 0x00060005,
789  	0x2461, 0xffffffff, 0x00090008,
790  	0x2462, 0xffffffff, 0x00030002,
791  	0x2463, 0xffffffff, 0x00050004,
792  	0x2464, 0xffffffff, 0x00000008,
793  	0x2465, 0xffffffff, 0x00070006,
794  	0x2466, 0xffffffff, 0x000a0009,
795  	0x2467, 0xffffffff, 0x00040003,
796  	0x2468, 0xffffffff, 0x00060005,
797  	0x2469, 0xffffffff, 0x00000009,
798  	0x246a, 0xffffffff, 0x00080007,
799  	0x246b, 0xffffffff, 0x000b000a,
800  	0x246c, 0xffffffff, 0x00050004,
801  	0x246d, 0xffffffff, 0x00070006,
802  	0x246e, 0xffffffff, 0x0008000b,
803  	0x246f, 0xffffffff, 0x000a0009,
804  	0x2470, 0xffffffff, 0x000d000c,
805  	0x2471, 0xffffffff, 0x00060005,
806  	0x2472, 0xffffffff, 0x00080007,
807  	0x2473, 0xffffffff, 0x0000000b,
808  	0x2474, 0xffffffff, 0x000a0009,
809  	0x2475, 0xffffffff, 0x000d000c,
810  	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
811  	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
812  	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
813  	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
814  	0x000c, 0xffffffff, 0x0000001c,
815  	0x000d, 0x000f0000, 0x000f0000,
816  	0x0583, 0xffffffff, 0x00000100,
817  	mmXDMA_CLOCK_GATING_CNTL, 0xffffffff, 0x00000100,
818  	mmXDMA_MEM_POWER_CNTL, 0x00000101, 0x00000000,
819  	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
820  	mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000,
821  	mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000,
822  	mmCGTT_DRM_CLK_CTRL0, 0xff000fff, 0x00000100,
823  	0x157a, 0x00000001, 0x00000001,
824  	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
825  	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
826  	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
827  	0x3430, 0xfffffff0, 0x00000100,
828  	0x3630, 0xfffffff0, 0x00000100,
829  };
830  
831  static const u32 hainan_mgcg_cgcg_init[] =
832  {
833  	mmRLC_CGTT_MGCG_OVERRIDE, 0xffffffff, 0xfffffffc,
834  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
835  	mmCB_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
836  	mmCGTT_BCI_CLK_CTRL, 0xffffffff, 0x00000100,
837  	mmCGTT_CP_CLK_CTRL, 0xffffffff, 0x00000100,
838  	mmCGTT_GDS_CLK_CTRL, 0xffffffff, 0x00000100,
839  	mmCGTT_IA_CLK_CTRL, 0xffffffff, 0x06000100,
840  	mmCGTT_PA_CLK_CTRL, 0xffffffff, 0x00000100,
841  	mmCGTT_PC_CLK_CTRL, 0xffffffff, 0x00000100,
842  	mmCGTT_RLC_CLK_CTRL, 0xffffffff, 0x00000100,
843  	mmCGTT_SC_CLK_CTRL, 0xffffffff, 0x00000100,
844  	mmCGTT_SPI_CLK_CTRL, 0xffffffff, 0x00000100,
845  	mmCGTT_SQ_CLK_CTRL, 0xffffffff, 0x00000100,
846  	mmCGTT_SQG_CLK_CTRL, 0xffffffff, 0x00000100,
847  	mmCGTT_SX_CLK_CTRL0, 0xffffffff, 0x00000100,
848  	mmCGTT_SX_CLK_CTRL1, 0xffffffff, 0x00000100,
849  	mmCGTT_SX_CLK_CTRL2, 0xffffffff, 0x00000100,
850  	mmCGTT_SX_CLK_CTRL3, 0xffffffff, 0x00000100,
851  	mmCGTT_TCI_CLK_CTRL, 0xffffffff, 0x00000100,
852  	mmCGTT_TCP_CLK_CTRL, 0xffffffff, 0x00000100,
853  	mmCGTT_VGT_CLK_CTRL, 0xffffffff, 0x06000100,
854  	mmDB_CGTT_CLK_CTRL_0, 0xffffffff, 0x00000100,
855  	mmTA_CGTT_CTRL, 0xffffffff, 0x00000100,
856  	mmTCA_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
857  	mmTCC_CGTT_SCLK_CTRL, 0xffffffff, 0x00000100,
858  	mmTD_CGTT_CTRL, 0xffffffff, 0x00000100,
859  	mmGRBM_GFX_INDEX, 0xffffffff, 0xe0000000,
860  	0x2458, 0xffffffff, 0x00010000,
861  	0x2459, 0xffffffff, 0x00030002,
862  	0x245a, 0xffffffff, 0x00040007,
863  	0x245b, 0xffffffff, 0x00060005,
864  	0x245c, 0xffffffff, 0x00090008,
865  	0x245d, 0xffffffff, 0x00020001,
866  	0x245e, 0xffffffff, 0x00040003,
867  	0x245f, 0xffffffff, 0x00000007,
868  	0x2460, 0xffffffff, 0x00060005,
869  	0x2461, 0xffffffff, 0x00090008,
870  	0x2462, 0xffffffff, 0x00030002,
871  	0x2463, 0xffffffff, 0x00050004,
872  	0x2464, 0xffffffff, 0x00000008,
873  	0x2465, 0xffffffff, 0x00070006,
874  	0x2466, 0xffffffff, 0x000a0009,
875  	0x2467, 0xffffffff, 0x00040003,
876  	0x2468, 0xffffffff, 0x00060005,
877  	0x2469, 0xffffffff, 0x00000009,
878  	0x246a, 0xffffffff, 0x00080007,
879  	0x246b, 0xffffffff, 0x000b000a,
880  	0x246c, 0xffffffff, 0x00050004,
881  	0x246d, 0xffffffff, 0x00070006,
882  	0x246e, 0xffffffff, 0x0008000b,
883  	0x246f, 0xffffffff, 0x000a0009,
884  	0x2470, 0xffffffff, 0x000d000c,
885  	0x2471, 0xffffffff, 0x00060005,
886  	0x2472, 0xffffffff, 0x00080007,
887  	0x2473, 0xffffffff, 0x0000000b,
888  	0x2474, 0xffffffff, 0x000a0009,
889  	0x2475, 0xffffffff, 0x000d000c,
890  	mmCGTS_SM_CTRL_REG, 0xffffffff, 0x96940200,
891  	mmCP_RB_WPTR_POLL_CNTL, 0xffffffff, 0x00900100,
892  	mmRLC_GCPM_GENERAL_3, 0xffffffff, 0x00000080,
893  	mmRLC_CGCG_CGLS_CTRL, 0xffffffff, 0x0020003f,
894  	0x000c, 0xffffffff, 0x0000001c,
895  	0x000d, 0x000f0000, 0x000f0000,
896  	0x0583, 0xffffffff, 0x00000100,
897  	0x0409, 0xffffffff, 0x00000100,
898  	mmMC_MEM_POWER_LS, 0xffffffff, 0x00000104,
899  	mmMC_CITF_MISC_WR_CG, 0x000c0000, 0x000c0000,
900  	mmMC_CITF_MISC_RD_CG, 0x000c0000, 0x000c0000,
901  	mmHDP_MEM_POWER_LS, 0x00000001, 0x00000001,
902  	mmHDP_XDP_CGTT_BLK_CTRL, 0xc0000fff, 0x00000104,
903  	mmCP_MEM_SLP_CNTL, 0x00000001, 0x00000001,
904  	0x3430, 0xfffffff0, 0x00000100,
905  	0x3630, 0xfffffff0, 0x00000100,
906  };
907  
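/*
 * Indirect register accessors.  Each read/write pair below serializes on a
 * per-aperture spinlock, programs the INDEX register with the target offset
 * (reading it back where a posting read is needed), and then accesses the
 * DATA register.  These helpers are presumably installed as the corresponding
 * adev->*_rreg/*_wreg callbacks during early init.
 */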
908  static u32 si_pcie_rreg(struct amdgpu_device *adev, u32 reg)
909  {
910  	unsigned long flags;
911  	u32 r;
912  
913  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
914  	WREG32(AMDGPU_PCIE_INDEX, reg);
915  	(void)RREG32(AMDGPU_PCIE_INDEX);
916  	r = RREG32(AMDGPU_PCIE_DATA);
917  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
918  	return r;
919  }
920  
921  static void si_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
922  {
923  	unsigned long flags;
924  
925  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
926  	WREG32(AMDGPU_PCIE_INDEX, reg);
927  	(void)RREG32(AMDGPU_PCIE_INDEX);
928  	WREG32(AMDGPU_PCIE_DATA, v);
929  	(void)RREG32(AMDGPU_PCIE_DATA);
930  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
931  }
932  
933  static u32 si_pciep_rreg(struct amdgpu_device *adev, u32 reg)
934  {
935  	unsigned long flags;
936  	u32 r;
937  
938  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
939  	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
940  	(void)RREG32(PCIE_PORT_INDEX);
941  	r = RREG32(PCIE_PORT_DATA);
942  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
943  	return r;
944  }
945  
946  static void si_pciep_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
947  {
948  	unsigned long flags;
949  
950  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
951  	WREG32(PCIE_PORT_INDEX, ((reg) & 0xff));
952  	(void)RREG32(PCIE_PORT_INDEX);
953  	WREG32(PCIE_PORT_DATA, (v));
954  	(void)RREG32(PCIE_PORT_DATA);
955  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
956  }
957  
958  static u32 si_smc_rreg(struct amdgpu_device *adev, u32 reg)
959  {
960  	unsigned long flags;
961  	u32 r;
962  
963  	spin_lock_irqsave(&adev->smc_idx_lock, flags);
964  	WREG32(SMC_IND_INDEX_0, (reg));
965  	r = RREG32(SMC_IND_DATA_0);
966  	spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
967  	return r;
968  }
969  
970  static void si_smc_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
971  {
972  	unsigned long flags;
973  
974  	spin_lock_irqsave(&adev->smc_idx_lock, flags);
975  	WREG32(SMC_IND_INDEX_0, (reg));
976  	WREG32(SMC_IND_DATA_0, (v));
977  	spin_unlock_irqrestore(&adev->smc_idx_lock, flags);
978  }
979  
980  static u32 si_uvd_ctx_rreg(struct amdgpu_device *adev, u32 reg)
981  {
982  	unsigned long flags;
983  	u32 r;
984  
985  	spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
986  	WREG32(mmUVD_CTX_INDEX, ((reg) & 0x1ff));
987  	r = RREG32(mmUVD_CTX_DATA);
988  	spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
989  	return r;
990  }
991  
992  static void si_uvd_ctx_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
993  {
994  	unsigned long flags;
995  
996  	spin_lock_irqsave(&adev->uvd_ctx_idx_lock, flags);
997  	WREG32(mmUVD_CTX_INDEX, ((reg) & 0x1ff));
998  	WREG32(mmUVD_CTX_DATA, (v));
999  	spin_unlock_irqrestore(&adev->uvd_ctx_idx_lock, flags);
1000  }
1001  
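/*
 * Allow-list of registers that may be read back through the read_register
 * interface.  Entries flagged 'true' are GRBM-indexed and are resolved per
 * SE/SH by si_get_register_value() below.
 */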
1002  static struct amdgpu_allowed_register_entry si_allowed_read_registers[] = {
1003  	{GRBM_STATUS},
1004  	{mmGRBM_STATUS2},
1005  	{mmGRBM_STATUS_SE0},
1006  	{mmGRBM_STATUS_SE1},
1007  	{mmSRBM_STATUS},
1008  	{mmSRBM_STATUS2},
1009  	{DMA_STATUS_REG + DMA0_REGISTER_OFFSET},
1010  	{DMA_STATUS_REG + DMA1_REGISTER_OFFSET},
1011  	{mmCP_STAT},
1012  	{mmCP_STALLED_STAT1},
1013  	{mmCP_STALLED_STAT2},
1014  	{mmCP_STALLED_STAT3},
1015  	{GB_ADDR_CONFIG},
1016  	{MC_ARB_RAMCFG},
1017  	{GB_TILE_MODE0},
1018  	{GB_TILE_MODE1},
1019  	{GB_TILE_MODE2},
1020  	{GB_TILE_MODE3},
1021  	{GB_TILE_MODE4},
1022  	{GB_TILE_MODE5},
1023  	{GB_TILE_MODE6},
1024  	{GB_TILE_MODE7},
1025  	{GB_TILE_MODE8},
1026  	{GB_TILE_MODE9},
1027  	{GB_TILE_MODE10},
1028  	{GB_TILE_MODE11},
1029  	{GB_TILE_MODE12},
1030  	{GB_TILE_MODE13},
1031  	{GB_TILE_MODE14},
1032  	{GB_TILE_MODE15},
1033  	{GB_TILE_MODE16},
1034  	{GB_TILE_MODE17},
1035  	{GB_TILE_MODE18},
1036  	{GB_TILE_MODE19},
1037  	{GB_TILE_MODE20},
1038  	{GB_TILE_MODE21},
1039  	{GB_TILE_MODE22},
1040  	{GB_TILE_MODE23},
1041  	{GB_TILE_MODE24},
1042  	{GB_TILE_MODE25},
1043  	{GB_TILE_MODE26},
1044  	{GB_TILE_MODE27},
1045  	{GB_TILE_MODE28},
1046  	{GB_TILE_MODE29},
1047  	{GB_TILE_MODE30},
1048  	{GB_TILE_MODE31},
1049  	{CC_RB_BACKEND_DISABLE, true},
1050  	{GC_USER_RB_BACKEND_DISABLE, true},
1051  	{PA_SC_RASTER_CONFIG, true},
1052  };
1053  
1054  static uint32_t si_get_register_value(struct amdgpu_device *adev,
1055  				      bool indexed, u32 se_num,
1056  				      u32 sh_num, u32 reg_offset)
1057  {
1058  	if (indexed) {
1059  		uint32_t val;
1060  		unsigned se_idx = (se_num == 0xffffffff) ? 0 : se_num;
1061  		unsigned sh_idx = (sh_num == 0xffffffff) ? 0 : sh_num;
1062  
1063  		switch (reg_offset) {
1064  		case mmCC_RB_BACKEND_DISABLE:
1065  			return adev->gfx.config.rb_config[se_idx][sh_idx].rb_backend_disable;
1066  		case mmGC_USER_RB_BACKEND_DISABLE:
1067  			return adev->gfx.config.rb_config[se_idx][sh_idx].user_rb_backend_disable;
1068  		case mmPA_SC_RASTER_CONFIG:
1069  			return adev->gfx.config.rb_config[se_idx][sh_idx].raster_config;
1070  		}
1071  
1072  		mutex_lock(&adev->grbm_idx_mutex);
1073  		if (se_num != 0xffffffff || sh_num != 0xffffffff)
1074  			amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);
1075  
1076  		val = RREG32(reg_offset);
1077  
1078  		if (se_num != 0xffffffff || sh_num != 0xffffffff)
1079  			amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
1080  		mutex_unlock(&adev->grbm_idx_mutex);
1081  		return val;
1082  	} else {
1083  		unsigned idx;
1084  
1085  		switch (reg_offset) {
1086  		case mmGB_ADDR_CONFIG:
1087  			return adev->gfx.config.gb_addr_config;
1088  		case mmMC_ARB_RAMCFG:
1089  			return adev->gfx.config.mc_arb_ramcfg;
1090  		case mmGB_TILE_MODE0:
1091  		case mmGB_TILE_MODE1:
1092  		case mmGB_TILE_MODE2:
1093  		case mmGB_TILE_MODE3:
1094  		case mmGB_TILE_MODE4:
1095  		case mmGB_TILE_MODE5:
1096  		case mmGB_TILE_MODE6:
1097  		case mmGB_TILE_MODE7:
1098  		case mmGB_TILE_MODE8:
1099  		case mmGB_TILE_MODE9:
1100  		case mmGB_TILE_MODE10:
1101  		case mmGB_TILE_MODE11:
1102  		case mmGB_TILE_MODE12:
1103  		case mmGB_TILE_MODE13:
1104  		case mmGB_TILE_MODE14:
1105  		case mmGB_TILE_MODE15:
1106  		case mmGB_TILE_MODE16:
1107  		case mmGB_TILE_MODE17:
1108  		case mmGB_TILE_MODE18:
1109  		case mmGB_TILE_MODE19:
1110  		case mmGB_TILE_MODE20:
1111  		case mmGB_TILE_MODE21:
1112  		case mmGB_TILE_MODE22:
1113  		case mmGB_TILE_MODE23:
1114  		case mmGB_TILE_MODE24:
1115  		case mmGB_TILE_MODE25:
1116  		case mmGB_TILE_MODE26:
1117  		case mmGB_TILE_MODE27:
1118  		case mmGB_TILE_MODE28:
1119  		case mmGB_TILE_MODE29:
1120  		case mmGB_TILE_MODE30:
1121  		case mmGB_TILE_MODE31:
1122  			idx = (reg_offset - mmGB_TILE_MODE0);
1123  			return adev->gfx.config.tile_mode_array[idx];
1124  		default:
1125  			return RREG32(reg_offset);
1126  		}
1127  	}
1128  }
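
/*
 * Backs the ASIC read_register callback: only offsets present in
 * si_allowed_read_registers[] may be read, anything else returns -EINVAL.
 * GRBM-indexed entries go through si_get_register_value() so cached per-SE/SH
 * values (or a GRBM-selected read) are returned.  Illustrative usage, roughly:
 *
 *	u32 val;
 *	if (!si_read_register(adev, 0xffffffff, 0xffffffff, mmGRBM_STATUS2, &val))
 *		... val now holds the register contents ...
 */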
1129  static int si_read_register(struct amdgpu_device *adev, u32 se_num,
1130  			     u32 sh_num, u32 reg_offset, u32 *value)
1131  {
1132  	uint32_t i;
1133  
1134  	*value = 0;
1135  	for (i = 0; i < ARRAY_SIZE(si_allowed_read_registers); i++) {
1136  		bool indexed = si_allowed_read_registers[i].grbm_indexed;
1137  
1138  		if (reg_offset != si_allowed_read_registers[i].reg_offset)
1139  			continue;
1140  
1141  		*value = si_get_register_value(adev, indexed, se_num, sh_num,
1142  					       reg_offset);
1143  		return 0;
1144  	}
1145  	return -EINVAL;
1146  }
1147  
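/*
 * Read the vBIOS while the ROM is normally inaccessible (e.g. a card whose
 * ROM has been disabled by the platform).  The sequence below saves the
 * relevant state, enables the ROM and disables VGA decode, reads the image
 * via amdgpu_read_bios(), then restores everything it touched.
 */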
1148  static bool si_read_disabled_bios(struct amdgpu_device *adev)
1149  {
1150  	u32 bus_cntl;
1151  	u32 d1vga_control = 0;
1152  	u32 d2vga_control = 0;
1153  	u32 vga_render_control = 0;
1154  	u32 rom_cntl;
1155  	bool r;
1156  
1157  	bus_cntl = RREG32(R600_BUS_CNTL);
1158  	if (adev->mode_info.num_crtc) {
1159  		d1vga_control = RREG32(AVIVO_D1VGA_CONTROL);
1160  		d2vga_control = RREG32(AVIVO_D2VGA_CONTROL);
1161  		vga_render_control = RREG32(VGA_RENDER_CONTROL);
1162  	}
1163  	rom_cntl = RREG32(R600_ROM_CNTL);
1164  
1165  	/* enable the rom */
1166  	WREG32(R600_BUS_CNTL, (bus_cntl & ~R600_BIOS_ROM_DIS));
1167  	if (adev->mode_info.num_crtc) {
1168  		/* Disable VGA mode */
1169  		WREG32(AVIVO_D1VGA_CONTROL,
1170  		       (d1vga_control & ~(AVIVO_DVGA_CONTROL_MODE_ENABLE |
1171  					  AVIVO_DVGA_CONTROL_TIMING_SELECT)));
1172  		WREG32(AVIVO_D2VGA_CONTROL,
1173  		       (d2vga_control & ~(AVIVO_DVGA_CONTROL_MODE_ENABLE |
1174  					  AVIVO_DVGA_CONTROL_TIMING_SELECT)));
1175  		WREG32(VGA_RENDER_CONTROL,
1176  		       (vga_render_control & C_000300_VGA_VSTATUS_CNTL));
1177  	}
1178  	WREG32(R600_ROM_CNTL, rom_cntl | R600_SCK_OVERWRITE);
1179  
1180  	r = amdgpu_read_bios(adev);
1181  
1182  	/* restore regs */
1183  	WREG32(R600_BUS_CNTL, bus_cntl);
1184  	if (adev->mode_info.num_crtc) {
1185  		WREG32(AVIVO_D1VGA_CONTROL, d1vga_control);
1186  		WREG32(AVIVO_D2VGA_CONTROL, d2vga_control);
1187  		WREG32(VGA_RENDER_CONTROL, vga_render_control);
1188  	}
1189  	WREG32(R600_ROM_CNTL, rom_cntl);
1190  	return r;
1191  }
1192  
1193  #define mmROM_INDEX 0x2A
1194  #define mmROM_DATA  0x2B
1195  
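/*
 * Read the vBIOS image a dword at a time through the ROM_INDEX/ROM_DATA
 * indirect pair.  Only the first index write is issued, which relies on the
 * data port advancing the index after each read.
 */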
1196  static bool si_read_bios_from_rom(struct amdgpu_device *adev,
1197  				  u8 *bios, u32 length_bytes)
1198  {
1199  	u32 *dw_ptr;
1200  	u32 i, length_dw;
1201  
1202  	if (bios == NULL)
1203  		return false;
1204  	if (length_bytes == 0)
1205  		return false;
1206  	/* APU vbios image is part of sbios image */
1207  	if (adev->flags & AMD_IS_APU)
1208  		return false;
1209  
1210  	dw_ptr = (u32 *)bios;
1211  	length_dw = ALIGN(length_bytes, 4) / 4;
1212  	/* set rom index to 0 */
1213  	WREG32(mmROM_INDEX, 0);
1214  	for (i = 0; i < length_dw; i++)
1215  		dw_ptr[i] = RREG32(mmROM_DATA);
1216  
1217  	return true;
1218  }
1219  
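/*
 * Switch SCLK and MCLK onto their bypass sources so the PLLs can be safely
 * powered down before the PCI config reset below.
 */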
1220  static void si_set_clk_bypass_mode(struct amdgpu_device *adev)
1221  {
1222  	u32 tmp, i;
1223  
1224  	tmp = RREG32(CG_SPLL_FUNC_CNTL);
1225  	tmp |= SPLL_BYPASS_EN;
1226  	WREG32(CG_SPLL_FUNC_CNTL, tmp);
1227  
1228  	tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
1229  	tmp |= SPLL_CTLREQ_CHG;
1230  	WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
1231  
1232  	for (i = 0; i < adev->usec_timeout; i++) {
1233  		if (RREG32(SPLL_STATUS) & SPLL_CHG_STATUS)
1234  			break;
1235  		udelay(1);
1236  	}
1237  
1238  	tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
1239  	tmp &= ~(SPLL_CTLREQ_CHG | SCLK_MUX_UPDATE);
1240  	WREG32(CG_SPLL_FUNC_CNTL_2, tmp);
1241  
1242  	tmp = RREG32(MPLL_CNTL_MODE);
1243  	tmp &= ~MPLL_MCLK_SEL;
1244  	WREG32(MPLL_CNTL_MODE, tmp);
1245  }
1246  
1247  static void si_spll_powerdown(struct amdgpu_device *adev)
1248  {
1249  	u32 tmp;
1250  
1251  	tmp = RREG32(SPLL_CNTL_MODE);
1252  	tmp |= SPLL_SW_DIR_CONTROL;
1253  	WREG32(SPLL_CNTL_MODE, tmp);
1254  
1255  	tmp = RREG32(CG_SPLL_FUNC_CNTL);
1256  	tmp |= SPLL_RESET;
1257  	WREG32(CG_SPLL_FUNC_CNTL, tmp);
1258  
1259  	tmp = RREG32(CG_SPLL_FUNC_CNTL);
1260  	tmp |= SPLL_SLEEP;
1261  	WREG32(CG_SPLL_FUNC_CNTL, tmp);
1262  
1263  	tmp = RREG32(SPLL_CNTL_MODE);
1264  	tmp &= ~SPLL_SW_DIR_CONTROL;
1265  	WREG32(SPLL_CNTL_MODE, tmp);
1266  }
1267  
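/*
 * Full-ASIC reset through PCI config space.  Roughly: put the clocks on
 * bypass, power down the SPLL, disable bus mastering, trigger the config
 * reset, then poll mmCONFIG_MEMSIZE until the ASIC responds again (or the
 * usec timeout expires, in which case -EINVAL is returned).
 */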
1268  static int si_gpu_pci_config_reset(struct amdgpu_device *adev)
1269  {
1270  	u32 i;
1271  	int r = -EINVAL;
1272  
1273  	amdgpu_atombios_scratch_regs_engine_hung(adev, true);
1274  
1275  	/* set mclk/sclk to bypass */
1276  	si_set_clk_bypass_mode(adev);
1277  	/* powerdown spll */
1278  	si_spll_powerdown(adev);
1279  	/* disable BM */
1280  	pci_clear_master(adev->pdev);
1281  	/* reset */
1282  	amdgpu_device_pci_config_reset(adev);
1283  
1284  	udelay(100);
1285  
1286  	/* wait for asic to come out of reset */
1287  	for (i = 0; i < adev->usec_timeout; i++) {
1288  		if (RREG32(mmCONFIG_MEMSIZE) != 0xffffffff) {
1289  			/* enable BM */
1290  			pci_set_master(adev->pdev);
1291  			adev->has_hw_reset = true;
1292  			r = 0;
1293  			break;
1294  		}
1295  		udelay(1);
1296  	}
1297  	amdgpu_atombios_scratch_regs_engine_hung(adev, false);
1298  
1299  	return r;
1300  }
1301  
1302  static bool si_asic_supports_baco(struct amdgpu_device *adev)
1303  {
1304  	return false;
1305  }
1306  
1307  static enum amd_reset_method
1308  si_asic_reset_method(struct amdgpu_device *adev)
1309  {
1310  	if (amdgpu_reset_method == AMD_RESET_METHOD_PCI)
1311  		return amdgpu_reset_method;
1312  	else if (amdgpu_reset_method != AMD_RESET_METHOD_LEGACY &&
1313  		 amdgpu_reset_method != -1)
1314  		dev_warn(adev->dev, "Specified reset method:%d isn't supported, using AUTO instead.\n",
1315  			 amdgpu_reset_method);
1316  
1317  	return AMD_RESET_METHOD_LEGACY;
1318  }
1319  
1320  static int si_asic_reset(struct amdgpu_device *adev)
1321  {
1322  	int r;
1323  
1324  	switch (si_asic_reset_method(adev)) {
1325  	case AMD_RESET_METHOD_PCI:
1326  		dev_info(adev->dev, "PCI reset\n");
1327  		r = amdgpu_device_pci_reset(adev);
1328  		break;
1329  	default:
1330  		dev_info(adev->dev, "PCI CONFIG reset\n");
1331  		r = si_gpu_pci_config_reset(adev);
1332  		break;
1333  	}
1334  
1335  	return r;
1336  }
1337  
1338  static u32 si_get_config_memsize(struct amdgpu_device *adev)
1339  {
1340  	return RREG32(mmCONFIG_MEMSIZE);
1341  }
1342  
1343  static void si_vga_set_state(struct amdgpu_device *adev, bool state)
1344  {
1345  	uint32_t temp;
1346  
1347  	temp = RREG32(CONFIG_CNTL);
1348  	if (!state) {
1349  		temp &= ~(1<<0);
1350  		temp |= (1<<1);
1351  	} else {
1352  		temp &= ~(1<<1);
1353  	}
1354  	WREG32(CONFIG_CNTL, temp);
1355  }
1356  
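/*
 * Return the reference (crystal) clock feeding the ASIC: TCLK if the mux is
 * set that way, the reference frequency divided by four if the XTALIN divider
 * strap is set, otherwise the raw SPLL reference frequency.
 */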
1357  static u32 si_get_xclk(struct amdgpu_device *adev)
1358  {
1359  	u32 reference_clock = adev->clock.spll.reference_freq;
1360  	u32 tmp;
1361  
1362  	tmp = RREG32(CG_CLKPIN_CNTL_2);
1363  	if (tmp & MUX_TCLK_TO_XCLK)
1364  		return TCLK;
1365  
1366  	tmp = RREG32(CG_CLKPIN_CNTL);
1367  	if (tmp & XTALIN_DIVIDE)
1368  		return reference_clock / 4;
1369  
1370  	return reference_clock;
1371  }
1372  
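/*
 * HDP flush/invalidate: written directly via MMIO when no ring is supplied
 * (or the ring cannot emit register writes), otherwise emitted as a register
 * write on the given ring.
 */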
1373  static void si_flush_hdp(struct amdgpu_device *adev, struct amdgpu_ring *ring)
1374  {
1375  	if (!ring || !ring->funcs->emit_wreg) {
1376  		WREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
1377  		RREG32(mmHDP_MEM_COHERENCY_FLUSH_CNTL);
1378  	} else {
1379  		amdgpu_ring_emit_wreg(ring, mmHDP_MEM_COHERENCY_FLUSH_CNTL, 1);
1380  	}
1381  }
1382  
1383  static void si_invalidate_hdp(struct amdgpu_device *adev,
1384  			      struct amdgpu_ring *ring)
1385  {
1386  	if (!ring || !ring->funcs->emit_wreg) {
1387  		WREG32(mmHDP_DEBUG0, 1);
1388  		RREG32(mmHDP_DEBUG0);
1389  	} else {
1390  		amdgpu_ring_emit_wreg(ring, mmHDP_DEBUG0, 1);
1391  	}
1392  }
1393  
1394  static bool si_need_full_reset(struct amdgpu_device *adev)
1395  {
1396  	/* change this when we support soft reset */
1397  	return true;
1398  }
1399  
1400  static bool si_need_reset_on_init(struct amdgpu_device *adev)
1401  {
1402  	return false;
1403  }
1404  
1405  static int si_get_pcie_lanes(struct amdgpu_device *adev)
1406  {
1407  	u32 link_width_cntl;
1408  
1409  	if (adev->flags & AMD_IS_APU)
1410  		return 0;
1411  
1412  	link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
1413  
1414  	switch ((link_width_cntl & LC_LINK_WIDTH_RD_MASK) >> LC_LINK_WIDTH_RD_SHIFT) {
1415  	case LC_LINK_WIDTH_X1:
1416  		return 1;
1417  	case LC_LINK_WIDTH_X2:
1418  		return 2;
1419  	case LC_LINK_WIDTH_X4:
1420  		return 4;
1421  	case LC_LINK_WIDTH_X8:
1422  		return 8;
1423  	case LC_LINK_WIDTH_X0:
1424  	case LC_LINK_WIDTH_X16:
1425  	default:
1426  		return 16;
1427  	}
1428  }
1429  
1430  static void si_set_pcie_lanes(struct amdgpu_device *adev, int lanes)
1431  {
1432  	u32 link_width_cntl, mask;
1433  
1434  	if (adev->flags & AMD_IS_APU)
1435  		return;
1436  
1437  	switch (lanes) {
1438  	case 0:
1439  		mask = LC_LINK_WIDTH_X0;
1440  		break;
1441  	case 1:
1442  		mask = LC_LINK_WIDTH_X1;
1443  		break;
1444  	case 2:
1445  		mask = LC_LINK_WIDTH_X2;
1446  		break;
1447  	case 4:
1448  		mask = LC_LINK_WIDTH_X4;
1449  		break;
1450  	case 8:
1451  		mask = LC_LINK_WIDTH_X8;
1452  		break;
1453  	case 16:
1454  		mask = LC_LINK_WIDTH_X16;
1455  		break;
1456  	default:
1457  		DRM_ERROR("invalid pcie lane request: %d\n", lanes);
1458  		return;
1459  	}
1460  
1461  	link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
1462  	link_width_cntl &= ~LC_LINK_WIDTH_MASK;
1463  	link_width_cntl |= mask << LC_LINK_WIDTH_SHIFT;
1464  	link_width_cntl |= (LC_RECONFIG_NOW |
1465  			    LC_RECONFIG_ARC_MISSING_ESCAPE);
1466  
1467  	WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
1468  }
1469  
1470  static void si_get_pcie_usage(struct amdgpu_device *adev, uint64_t *count0,
1471  			      uint64_t *count1)
1472  {
1473  	uint32_t perfctr = 0;
1474  	uint64_t cnt0_of, cnt1_of;
1475  	int tmp;
1476  
1477  	/* These counters report 0 on APUs, so return early instead of touching
1478  	 * registers that may differ from their dGPU counterparts.
1479  	 */
1480  	if (adev->flags & AMD_IS_APU)
1481  		return;
1482  
1483  	/* Select the two perf counter events to watch: */
1484  	/* event 40 = # of received msgs, event 104 = # of posted requests sent */
1485  	perfctr = REG_SET_FIELD(perfctr, PCIE_PERF_CNTL_TXCLK, EVENT0_SEL, 40);
1486  	perfctr = REG_SET_FIELD(perfctr, PCIE_PERF_CNTL_TXCLK, EVENT1_SEL, 104);
1487  
1488  	/* Write to enable desired perf counters */
1489  	WREG32_PCIE(ixPCIE_PERF_CNTL_TXCLK, perfctr);
1490  	/* Zero out and enable the perf counters
1491  	 * Write 0x5:
1492  	 * Bit 0 = Start all counters(1)
1493  	 * Bit 2 = Global counter reset enable(1)
1494  	 */
1495  	WREG32_PCIE(ixPCIE_PERF_COUNT_CNTL, 0x00000005);
1496  
1497  	msleep(1000);
1498  
1499  	/* Load the shadow and disable the perf counters
1500  	 * Write 0x2:
1501  	 * Bit 0 = Stop counters(0)
1502  	 * Bit 1 = Load the shadow counters(1)
1503  	 */
1504  	WREG32_PCIE(ixPCIE_PERF_COUNT_CNTL, 0x00000002);
1505  
1506  	/* Read register values to get any >32bit overflow */
1507  	tmp = RREG32_PCIE(ixPCIE_PERF_CNTL_TXCLK);
1508  	cnt0_of = REG_GET_FIELD(tmp, PCIE_PERF_CNTL_TXCLK, COUNTER0_UPPER);
1509  	cnt1_of = REG_GET_FIELD(tmp, PCIE_PERF_CNTL_TXCLK, COUNTER1_UPPER);
1510  
1511  	/* Get the values and add the overflow */
1512  	*count0 = RREG32_PCIE(ixPCIE_PERF_COUNT0_TXCLK) | (cnt0_of << 32);
1513  	*count1 = RREG32_PCIE(ixPCIE_PERF_COUNT1_TXCLK) | (cnt1_of << 32);
1514  }
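
/*
 * Worked example of the overflow handling above (illustrative numbers): if
 * COUNTER0_UPPER reads 0x2 and PCIE_PERF_COUNT0_TXCLK reads 0x0000010f, the
 * reported value is (0x2ULL << 32) | 0x10f = 0x20000010f.  A hypothetical
 * caller simply hands in two u64s and gets the one-second sample back:
 */
#if 0
static void example_sample_pcie_usage(struct amdgpu_device *adev)
{
	uint64_t rx_msgs = 0, posted_reqs = 0;

	adev->asic_funcs->get_pcie_usage(adev, &rx_msgs, &posted_reqs);
}
#endif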
1515  
1516  static uint64_t si_get_pcie_replay_count(struct amdgpu_device *adev)
1517  {
1518  	uint64_t nak_r, nak_g;
1519  
1520  	/* Get the number of NAKs received and generated */
1521  	nak_r = RREG32_PCIE(ixPCIE_RX_NUM_NAK);
1522  	nak_g = RREG32_PCIE(ixPCIE_RX_NUM_NAK_GENERATED);
1523  
1524  	/* Add the total number of NAKs, i.e. the number of replays */
1525  	return (nak_r + nak_g);
1526  }
1527  
1528  static int si_uvd_send_upll_ctlreq(struct amdgpu_device *adev,
1529  				   unsigned cg_upll_func_cntl)
1530  {
1531  	unsigned i;
1532  
1533  	/* Make sure UPLL_CTLREQ is deasserted */
1534  	WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);
1535  
1536  	mdelay(10);
1537  
1538  	/* Assert UPLL_CTLREQ */
1539  	WREG32_P(cg_upll_func_cntl, UPLL_CTLREQ_MASK, ~UPLL_CTLREQ_MASK);
1540  
1541  	/* Wait for CTLACK and CTLACK2 to get asserted */
1542  	for (i = 0; i < SI_MAX_CTLACKS_ASSERTION_WAIT; ++i) {
1543  		uint32_t mask = UPLL_CTLACK_MASK | UPLL_CTLACK2_MASK;
1544  
1545  		if ((RREG32(cg_upll_func_cntl) & mask) == mask)
1546  			break;
1547  		mdelay(10);
1548  	}
1549  
1550  	/* Deassert UPLL_CTLREQ */
1551  	WREG32_P(cg_upll_func_cntl, 0, ~UPLL_CTLREQ_MASK);
1552  
1553  	if (i == SI_MAX_CTLACKS_ASSERTION_WAIT) {
1554  		DRM_ERROR("Timeout setting UVD clocks!\n");
1555  		return -ETIMEDOUT;
1556  	}
1557  
1558  	return 0;
1559  }
1560  
1561  static unsigned si_uvd_calc_upll_post_div(unsigned vco_freq,
1562  					  unsigned target_freq,
1563  					  unsigned pd_min,
1564  					  unsigned pd_even)
1565  {
1566  	unsigned post_div = vco_freq / target_freq;
1567  
1568  	/* Adjust to post divider minimum value */
1569  	if (post_div < pd_min)
1570  		post_div = pd_min;
1571  
1572  	/* We always need a frequency less than or equal to the target */
1573  	if ((vco_freq / post_div) > target_freq)
1574  		post_div += 1;
1575  
1576  	/* Post dividers above a certain value must be even */
1577  	if (post_div > pd_even && post_div % 2)
1578  		post_div += 1;
1579  
1580  	return post_div;
1581  }
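
/*
 * Worked example for the helper above (illustrative numbers, same units as
 * the caller): with vco_freq = 150000, target_freq = 40000, pd_min = 0 and
 * pd_even = 5, the initial divider is 150000 / 40000 = 3; since 150000 / 3 =
 * 50000 is above the target it is bumped to 4, and 4 is not above pd_even,
 * so the result is 4 (150000 / 4 = 37500 <= 40000).  For target_freq = 20000
 * the result is 8: 150000 / 7 still exceeds the target, and any divider
 * above pd_even must be even.
 */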
1582  
1583  /**
1584   * si_calc_upll_dividers - calc UPLL clock dividers
1585   *
1586   * @adev: amdgpu_device pointer
1587   * @vclk: wanted VCLK
1588   * @dclk: wanted DCLK
1589   * @vco_min: minimum VCO frequency
1590   * @vco_max: maximum VCO frequency
1591   * @fb_factor: factor to multiply vco freq with
1592   * @fb_mask: limit and bitmask for feedback divider
1593   * @pd_min: post divider minimum
1594   * @pd_max: post divider maximum
1595   * @pd_even: post divider must be even above this value
1596   * @optimal_fb_div: resulting feedback divider
1597   * @optimal_vclk_div: resulting vclk post divider
1598   * @optimal_dclk_div: resulting dclk post divider
1599   *
1600   * Calculate dividers for the UVD's UPLL (except APUs).
1601   * Returns zero on success; -EINVAL on error.
1602   */
1603  static int si_calc_upll_dividers(struct amdgpu_device *adev,
1604  				 unsigned vclk, unsigned dclk,
1605  				 unsigned vco_min, unsigned vco_max,
1606  				 unsigned fb_factor, unsigned fb_mask,
1607  				 unsigned pd_min, unsigned pd_max,
1608  				 unsigned pd_even,
1609  				 unsigned *optimal_fb_div,
1610  				 unsigned *optimal_vclk_div,
1611  				 unsigned *optimal_dclk_div)
1612  {
1613  	unsigned vco_freq, ref_freq = adev->clock.spll.reference_freq;
1614  
1615  	/* Start off with something large */
1616  	unsigned optimal_score = ~0;
1617  
1618  	/* Loop through vco from low to high */
1619  	vco_min = max(max(vco_min, vclk), dclk);
1620  	for (vco_freq = vco_min; vco_freq <= vco_max; vco_freq += 100) {
1621  		uint64_t fb_div = (uint64_t)vco_freq * fb_factor;
1622  		unsigned vclk_div, dclk_div, score;
1623  
1624  		do_div(fb_div, ref_freq);
1625  
1626  		/* fb div out of range? */
1627  		if (fb_div > fb_mask)
1628  			break; /* It can only get worse */
1629  
1630  		fb_div &= fb_mask;
1631  
1632  		/* Calc vclk divider with current vco freq */
1633  		vclk_div = si_uvd_calc_upll_post_div(vco_freq, vclk,
1634  						     pd_min, pd_even);
1635  		if (vclk_div > pd_max)
1636  			break; /* vco is too big, it has to stop */
1637  
1638  		/* Calc dclk divider with current vco freq */
1639  		dclk_div = si_uvd_calc_upll_post_div(vco_freq, dclk,
1640  						     pd_min, pd_even);
1641  		if (dclk_div > pd_max)
1642  			break; /* vco is too big, it has to stop */
1643  
1644  		/* Calc score with current vco freq */
1645  		score = vclk - (vco_freq / vclk_div) + dclk - (vco_freq / dclk_div);
1646  
1647  		/* Determine if this vco setting is better than current optimal settings */
1648  		if (score < optimal_score) {
1649  			*optimal_fb_div = fb_div;
1650  			*optimal_vclk_div = vclk_div;
1651  			*optimal_dclk_div = dclk_div;
1652  			optimal_score = score;
1653  			if (optimal_score == 0)
1654  				break; /* It can't get better than this */
1655  		}
1656  	}
1657  
1658  	/* Did we find a valid setup? */
1659  	if (optimal_score == ~0)
1660  		return -EINVAL;
1661  
1662  	return 0;
1663  }
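
/*
 * The score used above is the combined shortfall of the two output clocks:
 * score = (vclk - vco_freq / vclk_div) + (dclk - vco_freq / dclk_div).
 * With the illustrative numbers from the helper above (vco_freq = 150000,
 * vclk = 40000 -> vclk_div = 4, dclk = 30000 -> dclk_div = 5) the score is
 * (40000 - 37500) + (30000 - 30000) = 2500; a later VCO candidate only wins
 * if it scores lower, and a score of 0 (both clocks hit exactly) ends the
 * search early.
 */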
1664  
1665  static int si_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk)
1666  {
1667  	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1668  	int r;
1669  
1670  	/* Bypass vclk and dclk with bclk */
1671  	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1672  		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1673  		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1674  
1675  	/* Put PLL in bypass mode */
1676  	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1677  
1678  	if (!vclk || !dclk) {
1679  		/* Keep the Bypass mode */
1680  		return 0;
1681  	}
1682  
1683  	r = si_calc_upll_dividers(adev, vclk, dclk, 125000, 250000,
1684  				  16384, 0x03FFFFFF, 0, 128, 5,
1685  				  &fb_div, &vclk_div, &dclk_div);
1686  	if (r)
1687  		return r;
1688  
1689  	/* Set RESET_ANTI_MUX to 0 */
1690  	WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);
1691  
1692  	/* Set VCO_MODE to 1 */
1693  	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1694  
1695  	/* Disable sleep mode */
1696  	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1697  
1698  	/* Deassert UPLL_RESET */
1699  	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1700  
1701  	mdelay(1);
1702  
1703  	r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL);
1704  	if (r)
1705  		return r;
1706  
1707  	/* Assert UPLL_RESET again */
1708  	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1709  
1710  	/* Disable spread spectrum. */
1711  	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1712  
1713  	/* Set feedback divider */
1714  	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1715  
1716  	/* Set ref divider to 0 */
1717  	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
1718  
1719  	if (fb_div < 307200)
1720  		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1721  	else
1722  		WREG32_P(CG_UPLL_FUNC_CNTL_4,
1723  			 UPLL_SPARE_ISPARE9,
1724  			 ~UPLL_SPARE_ISPARE9);
1725  
1726  	/* Set PDIV_A and PDIV_B */
1727  	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1728  		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1729  		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1730  
1731  	/* Give the PLL some time to settle */
1732  	mdelay(15);
1733  
1734  	/* Deassert PLL_RESET */
1735  	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1736  
1737  	mdelay(15);
1738  
1739  	/* Switch from bypass mode to normal mode */
1740  	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1741  
1742  	r = si_uvd_send_upll_ctlreq(adev, CG_UPLL_FUNC_CNTL);
1743  	if (r)
1744  		return r;
1745  
1746  	/* Switch VCLK and DCLK selection */
1747  	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1748  		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1749  		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1750  
1751  	mdelay(100);
1752  
1753  	return 0;
1754  }
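
/*
 * Illustrative sketch (not driver code): si_set_uvd_clocks() is invoked
 * through the .set_uvd_clocks callback in si_asic_funcs below.  Requesting
 * 0/0 leaves the UPLL bypassed, anything else programs it as above:
 */
#if 0
static int example_request_uvd_clocks(struct amdgpu_device *adev,
				      u32 vclk, u32 dclk)
{
	return adev->asic_funcs->set_uvd_clocks(adev, vclk, dclk);
}
#endif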
1755  
1756  static int si_vce_send_vcepll_ctlreq(struct amdgpu_device *adev)
1757  {
1758  	unsigned i;
1759  
1760  	/* Make sure VCEPLL_CTLREQ is deasserted */
1761  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~UPLL_CTLREQ_MASK);
1762  
1763  	mdelay(10);
1764  
1765  	/* Assert VCEPLL_CTLREQ */
1766  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, UPLL_CTLREQ_MASK, ~UPLL_CTLREQ_MASK);
1767  
1768  	/* Wait for CTLACK and CTLACK2 to get asserted */
1769  	for (i = 0; i < SI_MAX_CTLACKS_ASSERTION_WAIT; ++i) {
1770  		uint32_t mask = UPLL_CTLACK_MASK | UPLL_CTLACK2_MASK;
1771  
1772  		if ((RREG32_SMC(CG_VCEPLL_FUNC_CNTL) & mask) == mask)
1773  			break;
1774  		mdelay(10);
1775  	}
1776  
1777  	/* Deassert VCEPLL_CTLREQ */
1778  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~UPLL_CTLREQ_MASK);
1779  
1780  	if (i == SI_MAX_CTLACKS_ASSERTION_WAIT) {
1781  		DRM_ERROR("Timeout setting VCE clocks!\n");
1782  		return -ETIMEDOUT;
1783  	}
1784  
1785  	return 0;
1786  }
1787  
1788  static int si_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk)
1789  {
1790  	unsigned fb_div = 0, evclk_div = 0, ecclk_div = 0;
1791  	int r;
1792  
1793  	/* Bypass evclk and ecclk with bclk */
1794  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2,
1795  		     EVCLK_SRC_SEL(1) | ECCLK_SRC_SEL(1),
1796  		     ~(EVCLK_SRC_SEL_MASK | ECCLK_SRC_SEL_MASK));
1797  
1798  	/* Put PLL in bypass mode */
1799  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_BYPASS_EN_MASK,
1800  		     ~VCEPLL_BYPASS_EN_MASK);
1801  
1802  	if (!evclk || !ecclk) {
1803  		/* Keep the Bypass mode, put PLL to sleep */
1804  		WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_SLEEP_MASK,
1805  			     ~VCEPLL_SLEEP_MASK);
1806  		return 0;
1807  	}
1808  
1809  	r = si_calc_upll_dividers(adev, evclk, ecclk, 125000, 250000,
1810  				  16384, 0x03FFFFFF, 0, 128, 5,
1811  				  &fb_div, &evclk_div, &ecclk_div);
1812  	if (r)
1813  		return r;
1814  
1815  	/* Set RESET_ANTI_MUX to 0 */
1816  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);
1817  
1818  	/* Set VCO_MODE to 1 */
1819  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_VCO_MODE_MASK,
1820  		     ~VCEPLL_VCO_MODE_MASK);
1821  
1822  	/* Toggle VCEPLL_SLEEP to 1 then back to 0 */
1823  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_SLEEP_MASK,
1824  		     ~VCEPLL_SLEEP_MASK);
1825  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_SLEEP_MASK);
1826  
1827  	/* Deassert VCEPLL_RESET */
1828  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_RESET_MASK);
1829  
1830  	mdelay(1);
1831  
1832  	r = si_vce_send_vcepll_ctlreq(adev);
1833  	if (r)
1834  		return r;
1835  
1836  	/* Assert VCEPLL_RESET again */
1837  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, VCEPLL_RESET_MASK, ~VCEPLL_RESET_MASK);
1838  
1839  	/* Disable spread spectrum. */
1840  	WREG32_SMC_P(CG_VCEPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1841  
1842  	/* Set feedback divider */
1843  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_3,
1844  		     VCEPLL_FB_DIV(fb_div),
1845  		     ~VCEPLL_FB_DIV_MASK);
1846  
1847  	/* Set ref divider to 0 */
1848  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_REF_DIV_MASK);
1849  
1850  	/* Set PDIV_A and PDIV_B */
1851  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2,
1852  		     VCEPLL_PDIV_A(evclk_div) | VCEPLL_PDIV_B(ecclk_div),
1853  		     ~(VCEPLL_PDIV_A_MASK | VCEPLL_PDIV_B_MASK));
1854  
1855  	/* Give the PLL some time to settle */
1856  	mdelay(15);
1857  
1858  	/* Deassert PLL_RESET */
1859  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_RESET_MASK);
1860  
1861  	mdelay(15);
1862  
1863  	/* Switch from bypass mode to normal mode */
1864  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL, 0, ~VCEPLL_BYPASS_EN_MASK);
1865  
1866  	r = si_vce_send_vcepll_ctlreq(adev);
1867  	if (r)
1868  		return r;
1869  
1870  	/* Switch EVCLK and ECCLK selection */
1871  	WREG32_SMC_P(CG_VCEPLL_FUNC_CNTL_2,
1872  		     EVCLK_SRC_SEL(16) | ECCLK_SRC_SEL(16),
1873  		     ~(EVCLK_SRC_SEL_MASK | ECCLK_SRC_SEL_MASK));
1874  
1875  	mdelay(100);
1876  
1877  	return 0;
1878  }
1879  
1880  static void si_pre_asic_init(struct amdgpu_device *adev)
1881  {
1882  }
1883  
1884  static const struct amdgpu_asic_funcs si_asic_funcs =
1885  {
1886  	.read_disabled_bios = &si_read_disabled_bios,
1887  	.read_bios_from_rom = &si_read_bios_from_rom,
1888  	.read_register = &si_read_register,
1889  	.reset = &si_asic_reset,
1890  	.reset_method = &si_asic_reset_method,
1891  	.set_vga_state = &si_vga_set_state,
1892  	.get_xclk = &si_get_xclk,
1893  	.set_uvd_clocks = &si_set_uvd_clocks,
1894  	.set_vce_clocks = &si_set_vce_clocks,
1895  	.get_pcie_lanes = &si_get_pcie_lanes,
1896  	.set_pcie_lanes = &si_set_pcie_lanes,
1897  	.get_config_memsize = &si_get_config_memsize,
1898  	.flush_hdp = &si_flush_hdp,
1899  	.invalidate_hdp = &si_invalidate_hdp,
1900  	.need_full_reset = &si_need_full_reset,
1901  	.get_pcie_usage = &si_get_pcie_usage,
1902  	.need_reset_on_init = &si_need_reset_on_init,
1903  	.get_pcie_replay_count = &si_get_pcie_replay_count,
1904  	.supports_baco = &si_asic_supports_baco,
1905  	.pre_asic_init = &si_pre_asic_init,
1906  };
1907  
1908  static uint32_t si_get_rev_id(struct amdgpu_device *adev)
1909  {
1910  	return (RREG32(CC_DRM_ID_STRAPS) & CC_DRM_ID_STRAPS__ATI_REV_ID_MASK)
1911  		>> CC_DRM_ID_STRAPS__ATI_REV_ID__SHIFT;
1912  }
1913  
1914  static int si_common_early_init(void *handle)
1915  {
1916  	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1917  
1918  	adev->smc_rreg = &si_smc_rreg;
1919  	adev->smc_wreg = &si_smc_wreg;
1920  	adev->pcie_rreg = &si_pcie_rreg;
1921  	adev->pcie_wreg = &si_pcie_wreg;
1922  	adev->pciep_rreg = &si_pciep_rreg;
1923  	adev->pciep_wreg = &si_pciep_wreg;
1924  	adev->uvd_ctx_rreg = si_uvd_ctx_rreg;
1925  	adev->uvd_ctx_wreg = si_uvd_ctx_wreg;
1926  	adev->didt_rreg = NULL;
1927  	adev->didt_wreg = NULL;
1928  
1929  	adev->asic_funcs = &si_asic_funcs;
1930  
1931  	adev->rev_id = si_get_rev_id(adev);
1932  	adev->external_rev_id = 0xFF;
1933  	switch (adev->asic_type) {
1934  	case CHIP_TAHITI:
1935  		adev->cg_flags =
1936  			AMD_CG_SUPPORT_GFX_MGCG |
1937  			AMD_CG_SUPPORT_GFX_MGLS |
1938  			/*AMD_CG_SUPPORT_GFX_CGCG |*/
1939  			AMD_CG_SUPPORT_GFX_CGLS |
1940  			AMD_CG_SUPPORT_GFX_CGTS |
1941  			AMD_CG_SUPPORT_GFX_CP_LS |
1942  			AMD_CG_SUPPORT_MC_MGCG |
1943  			AMD_CG_SUPPORT_SDMA_MGCG |
1944  			AMD_CG_SUPPORT_BIF_LS |
1945  			AMD_CG_SUPPORT_VCE_MGCG |
1946  			AMD_CG_SUPPORT_UVD_MGCG |
1947  			AMD_CG_SUPPORT_HDP_LS |
1948  			AMD_CG_SUPPORT_HDP_MGCG;
1949  		adev->pg_flags = 0;
1950  		adev->external_rev_id = (adev->rev_id == 0) ? 1 :
1951  					(adev->rev_id == 1) ? 5 : 6;
1952  		break;
1953  	case CHIP_PITCAIRN:
1954  		adev->cg_flags =
1955  			AMD_CG_SUPPORT_GFX_MGCG |
1956  			AMD_CG_SUPPORT_GFX_MGLS |
1957  			/*AMD_CG_SUPPORT_GFX_CGCG |*/
1958  			AMD_CG_SUPPORT_GFX_CGLS |
1959  			AMD_CG_SUPPORT_GFX_CGTS |
1960  			AMD_CG_SUPPORT_GFX_CP_LS |
1961  			AMD_CG_SUPPORT_GFX_RLC_LS |
1962  			AMD_CG_SUPPORT_MC_LS |
1963  			AMD_CG_SUPPORT_MC_MGCG |
1964  			AMD_CG_SUPPORT_SDMA_MGCG |
1965  			AMD_CG_SUPPORT_BIF_LS |
1966  			AMD_CG_SUPPORT_VCE_MGCG |
1967  			AMD_CG_SUPPORT_UVD_MGCG |
1968  			AMD_CG_SUPPORT_HDP_LS |
1969  			AMD_CG_SUPPORT_HDP_MGCG;
1970  		adev->pg_flags = 0;
1971  		adev->external_rev_id = adev->rev_id + 20;
1972  		break;
1973  
1974  	case CHIP_VERDE:
1975  		adev->cg_flags =
1976  			AMD_CG_SUPPORT_GFX_MGCG |
1977  			AMD_CG_SUPPORT_GFX_MGLS |
1978  			AMD_CG_SUPPORT_GFX_CGLS |
1979  			AMD_CG_SUPPORT_GFX_CGTS |
1980  			AMD_CG_SUPPORT_GFX_CGTS_LS |
1981  			AMD_CG_SUPPORT_GFX_CP_LS |
1982  			AMD_CG_SUPPORT_MC_LS |
1983  			AMD_CG_SUPPORT_MC_MGCG |
1984  			AMD_CG_SUPPORT_SDMA_MGCG |
1985  			AMD_CG_SUPPORT_SDMA_LS |
1986  			AMD_CG_SUPPORT_BIF_LS |
1987  			AMD_CG_SUPPORT_VCE_MGCG |
1988  			AMD_CG_SUPPORT_UVD_MGCG |
1989  			AMD_CG_SUPPORT_HDP_LS |
1990  			AMD_CG_SUPPORT_HDP_MGCG;
1991  		adev->pg_flags = 0;
1992  		/* XXX: external_rev_id offset for Verde not verified */
1993  		adev->external_rev_id = adev->rev_id + 40;
1994  		break;
1995  	case CHIP_OLAND:
1996  		adev->cg_flags =
1997  			AMD_CG_SUPPORT_GFX_MGCG |
1998  			AMD_CG_SUPPORT_GFX_MGLS |
1999  			/*AMD_CG_SUPPORT_GFX_CGCG |*/
2000  			AMD_CG_SUPPORT_GFX_CGLS |
2001  			AMD_CG_SUPPORT_GFX_CGTS |
2002  			AMD_CG_SUPPORT_GFX_CP_LS |
2003  			AMD_CG_SUPPORT_GFX_RLC_LS |
2004  			AMD_CG_SUPPORT_MC_LS |
2005  			AMD_CG_SUPPORT_MC_MGCG |
2006  			AMD_CG_SUPPORT_SDMA_MGCG |
2007  			AMD_CG_SUPPORT_BIF_LS |
2008  			AMD_CG_SUPPORT_UVD_MGCG |
2009  			AMD_CG_SUPPORT_HDP_LS |
2010  			AMD_CG_SUPPORT_HDP_MGCG;
2011  		adev->pg_flags = 0;
2012  		adev->external_rev_id = 60;
2013  		break;
2014  	case CHIP_HAINAN:
2015  		adev->cg_flags =
2016  			AMD_CG_SUPPORT_GFX_MGCG |
2017  			AMD_CG_SUPPORT_GFX_MGLS |
2018  			/*AMD_CG_SUPPORT_GFX_CGCG |*/
2019  			AMD_CG_SUPPORT_GFX_CGLS |
2020  			AMD_CG_SUPPORT_GFX_CGTS |
2021  			AMD_CG_SUPPORT_GFX_CP_LS |
2022  			AMD_CG_SUPPORT_GFX_RLC_LS |
2023  			AMD_CG_SUPPORT_MC_LS |
2024  			AMD_CG_SUPPORT_MC_MGCG |
2025  			AMD_CG_SUPPORT_SDMA_MGCG |
2026  			AMD_CG_SUPPORT_BIF_LS |
2027  			AMD_CG_SUPPORT_HDP_LS |
2028  			AMD_CG_SUPPORT_HDP_MGCG;
2029  		adev->pg_flags = 0;
2030  		adev->external_rev_id = 70;
2031  		break;
2032  
2033  	default:
2034  		return -EINVAL;
2035  	}
2036  
2037  	return 0;
2038  }
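
/*
 * The cg_flags/pg_flags masks filled in above are consumed later by the
 * individual IP blocks when they program clock/power gating.  A minimal
 * sketch of the usual pattern (the function and the register writes are
 * hypothetical, only the flag test mirrors real usage):
 */
#if 0
static void example_update_gfx_mgcg(struct amdgpu_device *adev)
{
	if (adev->cg_flags & AMD_CG_SUPPORT_GFX_MGCG) {
		/* program the GFX medium grain clockgating registers here */
	}
}
#endif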
2039  
2040  static int si_common_sw_init(void *handle)
2041  {
2042  	return 0;
2043  }
2044  
2045  static int si_common_sw_fini(void *handle)
2046  {
2047  	return 0;
2048  }
2049  
2050  
2051  static void si_init_golden_registers(struct amdgpu_device *adev)
2052  {
2053  	switch (adev->asic_type) {
2054  	case CHIP_TAHITI:
2055  		amdgpu_device_program_register_sequence(adev,
2056  							tahiti_golden_registers,
2057  							ARRAY_SIZE(tahiti_golden_registers));
2058  		amdgpu_device_program_register_sequence(adev,
2059  							tahiti_golden_rlc_registers,
2060  							ARRAY_SIZE(tahiti_golden_rlc_registers));
2061  		amdgpu_device_program_register_sequence(adev,
2062  							tahiti_mgcg_cgcg_init,
2063  							ARRAY_SIZE(tahiti_mgcg_cgcg_init));
2064  		amdgpu_device_program_register_sequence(adev,
2065  							tahiti_golden_registers2,
2066  							ARRAY_SIZE(tahiti_golden_registers2));
2067  		break;
2068  	case CHIP_PITCAIRN:
2069  		amdgpu_device_program_register_sequence(adev,
2070  							pitcairn_golden_registers,
2071  							ARRAY_SIZE(pitcairn_golden_registers));
2072  		amdgpu_device_program_register_sequence(adev,
2073  							pitcairn_golden_rlc_registers,
2074  							ARRAY_SIZE(pitcairn_golden_rlc_registers));
2075  		amdgpu_device_program_register_sequence(adev,
2076  							pitcairn_mgcg_cgcg_init,
2077  							ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
2078  		break;
2079  	case CHIP_VERDE:
2080  		amdgpu_device_program_register_sequence(adev,
2081  							verde_golden_registers,
2082  							ARRAY_SIZE(verde_golden_registers));
2083  		amdgpu_device_program_register_sequence(adev,
2084  							verde_golden_rlc_registers,
2085  							ARRAY_SIZE(verde_golden_rlc_registers));
2086  		amdgpu_device_program_register_sequence(adev,
2087  							verde_mgcg_cgcg_init,
2088  							ARRAY_SIZE(verde_mgcg_cgcg_init));
2089  		amdgpu_device_program_register_sequence(adev,
2090  							verde_pg_init,
2091  							ARRAY_SIZE(verde_pg_init));
2092  		break;
2093  	case CHIP_OLAND:
2094  		amdgpu_device_program_register_sequence(adev,
2095  							oland_golden_registers,
2096  							ARRAY_SIZE(oland_golden_registers));
2097  		amdgpu_device_program_register_sequence(adev,
2098  							oland_golden_rlc_registers,
2099  							ARRAY_SIZE(oland_golden_rlc_registers));
2100  		amdgpu_device_program_register_sequence(adev,
2101  							oland_mgcg_cgcg_init,
2102  							ARRAY_SIZE(oland_mgcg_cgcg_init));
2103  		break;
2104  	case CHIP_HAINAN:
2105  		amdgpu_device_program_register_sequence(adev,
2106  							hainan_golden_registers,
2107  							ARRAY_SIZE(hainan_golden_registers));
2108  		amdgpu_device_program_register_sequence(adev,
2109  							hainan_golden_registers2,
2110  							ARRAY_SIZE(hainan_golden_registers2));
2111  		amdgpu_device_program_register_sequence(adev,
2112  							hainan_mgcg_cgcg_init,
2113  							ARRAY_SIZE(hainan_mgcg_cgcg_init));
2114  		break;
2115  
2116  
2117  	default:
2118  		BUG();
2119  	}
2120  }
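
/*
 * Each golden register table is a flat array of (offset, and_mask, or_mask)
 * triplets.  A minimal sketch of how such a table is applied, assuming the
 * usual read-modify-write semantics of
 * amdgpu_device_program_register_sequence() (illustrative only, not the
 * actual helper):
 */
#if 0
static void example_apply_golden_triplets(struct amdgpu_device *adev,
					  const u32 *regs, u32 array_size)
{
	u32 i, tmp;

	for (i = 0; i + 2 < array_size; i += 3) {
		tmp = RREG32(regs[i]);
		tmp &= ~regs[i + 1];	/* clear the bits covered by the mask */
		tmp |= regs[i + 2];	/* merge in the golden value */
		WREG32(regs[i], tmp);
	}
}
#endif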
2121  
2122  static void si_pcie_gen3_enable(struct amdgpu_device *adev)
2123  {
2124  	struct pci_dev *root = adev->pdev->bus->self;
2125  	u32 speed_cntl, current_data_rate;
2126  	int i;
2127  	u16 tmp16;
2128  
2129  	if (pci_is_root_bus(adev->pdev->bus))
2130  		return;
2131  
2132  	if (amdgpu_pcie_gen2 == 0)
2133  		return;
2134  
2135  	if (adev->flags & AMD_IS_APU)
2136  		return;
2137  
2138  	if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 |
2139  					CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)))
2140  		return;
2141  
2142  	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
2143  	current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
2144  		LC_CURRENT_DATA_RATE_SHIFT;
2145  	if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
2146  		if (current_data_rate == 2) {
2147  			DRM_INFO("PCIE gen 3 link speeds already enabled\n");
2148  			return;
2149  		}
2150  		DRM_INFO("enabling PCIE gen 3 link speeds, disable with amdgpu.pcie_gen2=0\n");
2151  	} else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2) {
2152  		if (current_data_rate == 1) {
2153  			DRM_INFO("PCIE gen 2 link speeds already enabled\n");
2154  			return;
2155  		}
2156  		DRM_INFO("enabling PCIE gen 2 link speeds, disable with amdgpu.pcie_gen2=0\n");
2157  	}
2158  
2159  	if (!pci_is_pcie(root) || !pci_is_pcie(adev->pdev))
2160  		return;
2161  
2162  	if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3) {
2163  		if (current_data_rate != 2) {
2164  			u16 bridge_cfg, gpu_cfg;
2165  			u16 bridge_cfg2, gpu_cfg2;
2166  			u32 max_lw, current_lw, tmp;
2167  
2168  			pcie_capability_read_word(root, PCI_EXP_LNKCTL,
2169  						  &bridge_cfg);
2170  			pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL,
2171  						  &gpu_cfg);
2172  
2173  			tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
2174  			pcie_capability_write_word(root, PCI_EXP_LNKCTL, tmp16);
2175  
2176  			tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
2177  			pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL,
2178  						   tmp16);
2179  
2180  			tmp = RREG32_PCIE(PCIE_LC_STATUS1);
2181  			max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
2182  			current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;
2183  
2184  			if (current_lw < max_lw) {
2185  				tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
2186  				if (tmp & LC_RENEGOTIATION_SUPPORT) {
2187  					tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
2188  					tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
2189  					tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
2190  					WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
2191  				}
2192  			}
2193  
2194  			for (i = 0; i < 10; i++) {
2195  				pcie_capability_read_word(adev->pdev,
2196  							  PCI_EXP_DEVSTA,
2197  							  &tmp16);
2198  				if (tmp16 & PCI_EXP_DEVSTA_TRPND)
2199  					break;
2200  
2201  				pcie_capability_read_word(root, PCI_EXP_LNKCTL,
2202  							  &bridge_cfg);
2203  				pcie_capability_read_word(adev->pdev,
2204  							  PCI_EXP_LNKCTL,
2205  							  &gpu_cfg);
2206  
2207  				pcie_capability_read_word(root, PCI_EXP_LNKCTL2,
2208  							  &bridge_cfg2);
2209  				pcie_capability_read_word(adev->pdev,
2210  							  PCI_EXP_LNKCTL2,
2211  							  &gpu_cfg2);
2212  
2213  				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
2214  				tmp |= LC_SET_QUIESCE;
2215  				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
2216  
2217  				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
2218  				tmp |= LC_REDO_EQ;
2219  				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
2220  
2221  				mdelay(100);
2222  
2223  				pcie_capability_read_word(root, PCI_EXP_LNKCTL,
2224  							  &tmp16);
2225  				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
2226  				tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
2227  				pcie_capability_write_word(root, PCI_EXP_LNKCTL,
2228  							   tmp16);
2229  
2230  				pcie_capability_read_word(adev->pdev,
2231  							  PCI_EXP_LNKCTL,
2232  							  &tmp16);
2233  				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
2234  				tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
2235  				pcie_capability_write_word(adev->pdev,
2236  							   PCI_EXP_LNKCTL,
2237  							   tmp16);
2238  
2239  				pcie_capability_read_word(root, PCI_EXP_LNKCTL2,
2240  							  &tmp16);
2241  				tmp16 &= ~(PCI_EXP_LNKCTL2_ENTER_COMP |
2242  					   PCI_EXP_LNKCTL2_TX_MARGIN);
2243  				tmp16 |= (bridge_cfg2 &
2244  					  (PCI_EXP_LNKCTL2_ENTER_COMP |
2245  					   PCI_EXP_LNKCTL2_TX_MARGIN));
2246  				pcie_capability_write_word(root,
2247  							   PCI_EXP_LNKCTL2,
2248  							   tmp16);
2249  
2250  				pcie_capability_read_word(adev->pdev,
2251  							  PCI_EXP_LNKCTL2,
2252  							  &tmp16);
2253  				tmp16 &= ~(PCI_EXP_LNKCTL2_ENTER_COMP |
2254  					   PCI_EXP_LNKCTL2_TX_MARGIN);
2255  				tmp16 |= (gpu_cfg2 &
2256  					  (PCI_EXP_LNKCTL2_ENTER_COMP |
2257  					   PCI_EXP_LNKCTL2_TX_MARGIN));
2258  				pcie_capability_write_word(adev->pdev,
2259  							   PCI_EXP_LNKCTL2,
2260  							   tmp16);
2261  
2262  				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
2263  				tmp &= ~LC_SET_QUIESCE;
2264  				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
2265  			}
2266  		}
2267  	}
2268  
2269  	speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
2270  	speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
2271  	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
2272  
2273  	pcie_capability_read_word(adev->pdev, PCI_EXP_LNKCTL2, &tmp16);
2274  	tmp16 &= ~PCI_EXP_LNKCTL2_TLS;
2275  
2276  	if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)
2277  		tmp16 |= PCI_EXP_LNKCTL2_TLS_8_0GT; /* gen3 */
2278  	else if (adev->pm.pcie_gen_mask & CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2)
2279  		tmp16 |= PCI_EXP_LNKCTL2_TLS_5_0GT; /* gen2 */
2280  	else
2281  		tmp16 |= PCI_EXP_LNKCTL2_TLS_2_5GT; /* gen1 */
2282  	pcie_capability_write_word(adev->pdev, PCI_EXP_LNKCTL2, tmp16);
2283  
2284  	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
2285  	speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
2286  	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
2287  
2288  	for (i = 0; i < adev->usec_timeout; i++) {
2289  		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
2290  		if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
2291  			break;
2292  		udelay(1);
2293  	}
2294  }
2295  
2296  static inline u32 si_pif_phy0_rreg(struct amdgpu_device *adev, u32 reg)
2297  {
2298  	unsigned long flags;
2299  	u32 r;
2300  
2301  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2302  	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
2303  	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
2304  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2305  	return r;
2306  }
2307  
2308  static inline void si_pif_phy0_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
2309  {
2310  	unsigned long flags;
2311  
2312  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2313  	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
2314  	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
2315  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2316  }
2317  
2318  static inline u32 si_pif_phy1_rreg(struct amdgpu_device *adev, u32 reg)
2319  {
2320  	unsigned long flags;
2321  	u32 r;
2322  
2323  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2324  	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
2325  	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
2326  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2327  	return r;
2328  }
2329  
2330  static inline void si_pif_phy1_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
2331  {
2332  	unsigned long flags;
2333  
2334  	spin_lock_irqsave(&adev->pcie_idx_lock, flags);
2335  	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
2336  	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
2337  	spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
2338  }
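
/*
 * The PB0/PB1 PIF registers are reached through an index/data pair, which is
 * why the helpers above serialize on pcie_idx_lock.  A minimal
 * read-modify-write sketch using them (PB0_PIF_CNTL/LS2_EXIT_TIME_MASK are
 * simply the registers si_program_aspm() below touches; the function itself
 * is illustrative):
 */
#if 0
static void example_pif_rmw(struct amdgpu_device *adev)
{
	u32 v = si_pif_phy0_rreg(adev, PB0_PIF_CNTL);

	v &= ~LS2_EXIT_TIME_MASK;
	si_pif_phy0_wreg(adev, PB0_PIF_CNTL, v);
}
#endif
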
2339  static void si_program_aspm(struct amdgpu_device *adev)
2340  {
2341  	u32 data, orig;
2342  	bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
2343  	bool disable_clkreq = false;
2344  
2345  	if (amdgpu_aspm == 0)
2346  		return;
2347  
2348  	if (adev->flags & AMD_IS_APU)
2349  		return;
2350  	orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
2351  	data &= ~LC_XMIT_N_FTS_MASK;
2352  	data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
2353  	if (orig != data)
2354  		WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);
2355  
2356  	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
2357  	data |= LC_GO_TO_RECOVERY;
2358  	if (orig != data)
2359  		WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);
2360  
2361  	orig = data = RREG32_PCIE(PCIE_P_CNTL);
2362  	data |= P_IGNORE_EDB_ERR;
2363  	if (orig != data)
2364  		WREG32_PCIE(PCIE_P_CNTL, data);
2365  
2366  	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
2367  	data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
2368  	data |= LC_PMI_TO_L1_DIS;
2369  	if (!disable_l0s)
2370  		data |= LC_L0S_INACTIVITY(7);
2371  
2372  	if (!disable_l1) {
2373  		data |= LC_L1_INACTIVITY(7);
2374  		data &= ~LC_PMI_TO_L1_DIS;
2375  		if (orig != data)
2376  			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
2377  
2378  		if (!disable_plloff_in_l1) {
2379  			bool clk_req_support;
2380  
2381  			orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_0);
2382  			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
2383  			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
2384  			if (orig != data)
2385  				si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_0, data);
2386  
2387  			orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_1);
2388  			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
2389  			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
2390  			if (orig != data)
2391  				si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_1, data);
2392  
2393  			orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_0);
2394  			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
2395  			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
2396  			if (orig != data)
2397  				si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_0, data);
2398  
2399  			orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_1);
2400  			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
2401  			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
2402  			if (orig != data)
2403  				si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_1, data);
2404  
2405  			if ((adev->asic_type != CHIP_OLAND) && (adev->asic_type != CHIP_HAINAN)) {
2406  				orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_0);
2407  				data &= ~PLL_RAMP_UP_TIME_0_MASK;
2408  				if (orig != data)
2409  					si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_0, data);
2410  
2411  				orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_1);
2412  				data &= ~PLL_RAMP_UP_TIME_1_MASK;
2413  				if (orig != data)
2414  					si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_1, data);
2415  
2416  				orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_2);
2417  				data &= ~PLL_RAMP_UP_TIME_2_MASK;
2418  				if (orig != data)
2419  					si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_2, data);
2420  
2421  				orig = data = si_pif_phy0_rreg(adev, PB0_PIF_PWRDOWN_3);
2422  				data &= ~PLL_RAMP_UP_TIME_3_MASK;
2423  				if (orig != data)
2424  					si_pif_phy0_wreg(adev, PB0_PIF_PWRDOWN_3, data);
2425  
2426  				orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_0);
2427  				data &= ~PLL_RAMP_UP_TIME_0_MASK;
2428  				if (orig != data)
2429  					si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_0, data);
2430  
2431  				orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_1);
2432  				data &= ~PLL_RAMP_UP_TIME_1_MASK;
2433  				if (orig != data)
2434  					si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_1, data);
2435  
2436  				orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_2);
2437  				data &= ~PLL_RAMP_UP_TIME_2_MASK;
2438  				if (orig != data)
2439  					si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_2, data);
2440  
2441  				orig = data = si_pif_phy1_rreg(adev, PB1_PIF_PWRDOWN_3);
2442  				data &= ~PLL_RAMP_UP_TIME_3_MASK;
2443  				if (orig != data)
2444  					si_pif_phy1_wreg(adev, PB1_PIF_PWRDOWN_3, data);
2445  			}
2446  			orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
2447  			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
2448  			data |= LC_DYN_LANES_PWR_STATE(3);
2449  			if (orig != data)
2450  				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
2451  
2452  			orig = data = si_pif_phy0_rreg(adev, PB0_PIF_CNTL);
2453  			data &= ~LS2_EXIT_TIME_MASK;
2454  			if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN))
2455  				data |= LS2_EXIT_TIME(5);
2456  			if (orig != data)
2457  				si_pif_phy0_wreg(adev, PB0_PIF_CNTL, data);
2458  
2459  			orig = data = si_pif_phy1_rreg(adev, PB1_PIF_CNTL);
2460  			data &= ~LS2_EXIT_TIME_MASK;
2461  			if ((adev->asic_type == CHIP_OLAND) || (adev->asic_type == CHIP_HAINAN))
2462  				data |= LS2_EXIT_TIME(5);
2463  			if (orig != data)
2464  				si_pif_phy1_wreg(adev, PB1_PIF_CNTL, data);
2465  
2466  			if (!disable_clkreq &&
2467  			    !pci_is_root_bus(adev->pdev->bus)) {
2468  				struct pci_dev *root = adev->pdev->bus->self;
2469  				u32 lnkcap;
2470  
2471  				clk_req_support = false;
2472  				pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
2473  				if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
2474  					clk_req_support = true;
2475  			} else {
2476  				clk_req_support = false;
2477  			}
2478  
2479  			if (clk_req_support) {
2480  				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
2481  				data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
2482  				if (orig != data)
2483  					WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);
2484  
2485  				orig = data = RREG32(THM_CLK_CNTL);
2486  				data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
2487  				data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
2488  				if (orig != data)
2489  					WREG32(THM_CLK_CNTL, data);
2490  
2491  				orig = data = RREG32(MISC_CLK_CNTL);
2492  				data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
2493  				data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
2494  				if (orig != data)
2495  					WREG32(MISC_CLK_CNTL, data);
2496  
2497  				orig = data = RREG32(CG_CLKPIN_CNTL);
2498  				data &= ~BCLK_AS_XCLK;
2499  				if (orig != data)
2500  					WREG32(CG_CLKPIN_CNTL, data);
2501  
2502  				orig = data = RREG32(CG_CLKPIN_CNTL_2);
2503  				data &= ~FORCE_BIF_REFCLK_EN;
2504  				if (orig != data)
2505  					WREG32(CG_CLKPIN_CNTL_2, data);
2506  
2507  				orig = data = RREG32(MPLL_BYPASSCLK_SEL);
2508  				data &= ~MPLL_CLKOUT_SEL_MASK;
2509  				data |= MPLL_CLKOUT_SEL(4);
2510  				if (orig != data)
2511  					WREG32(MPLL_BYPASSCLK_SEL, data);
2512  
2513  				orig = data = RREG32(SPLL_CNTL_MODE);
2514  				data &= ~SPLL_REFCLK_SEL_MASK;
2515  				if (orig != data)
2516  					WREG32(SPLL_CNTL_MODE, data);
2517  			}
2518  		}
2519  	} else {
2520  		if (orig != data)
2521  			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
2522  	}
2523  
2524  	orig = data = RREG32_PCIE(PCIE_CNTL2);
2525  	data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
2526  	if (orig != data)
2527  		WREG32_PCIE(PCIE_CNTL2, data);
2528  
2529  	if (!disable_l0s) {
2530  		data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
2531  		if ((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
2532  			data = RREG32_PCIE(PCIE_LC_STATUS1);
2533  			if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
2534  				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
2535  				data &= ~LC_L0S_INACTIVITY_MASK;
2536  				if (orig != data)
2537  					WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
2538  			}
2539  		}
2540  	}
2541  }
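
/*
 * si_program_aspm() above leans on one idiom throughout: read a register
 * into both orig and data, modify data, and write back only when the value
 * actually changed, which avoids redundant writes during init.  In
 * isolation (PCIE_LC_CNTL3/LC_GO_TO_RECOVERY reused purely as an example):
 */
#if 0
static void example_rmw_if_changed(struct amdgpu_device *adev)
{
	u32 orig, data;

	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
	data |= LC_GO_TO_RECOVERY;
	if (orig != data)
		WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);
}
#endif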
2542  
2543  static void si_fix_pci_max_read_req_size(struct amdgpu_device *adev)
2544  {
2545  	int readrq;
2546  	u16 v;
2547  
2548  	readrq = pcie_get_readrq(adev->pdev);
2549  	v = ffs(readrq) - 8;
2550  	if ((v == 0) || (v == 6) || (v == 7))
2551  		pcie_set_readrq(adev->pdev, 512);
2552  }
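
/*
 * Worked example for the check above: pcie_get_readrq() returns the maximum
 * read request size in bytes, so 128 gives ffs(128) - 8 = 0, 256 gives 1,
 * ... and 4096 gives 5.  v == 0 (128 bytes) and the out-of-range encodings
 * v == 6 or 7 (which would mean 8192/16384 bytes, not valid PCIe values)
 * are all reset to a safe 512.
 */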
2553  
2554  static int si_common_hw_init(void *handle)
2555  {
2556  	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2557  
2558  	si_fix_pci_max_read_req_size(adev);
2559  	si_init_golden_registers(adev);
2560  	si_pcie_gen3_enable(adev);
2561  	si_program_aspm(adev);
2562  
2563  	return 0;
2564  }
2565  
2566  static int si_common_hw_fini(void *handle)
2567  {
2568  	return 0;
2569  }
2570  
2571  static int si_common_suspend(void *handle)
2572  {
2573  	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2574  
2575  	return si_common_hw_fini(adev);
2576  }
2577  
2578  static int si_common_resume(void *handle)
2579  {
2580  	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
2581  
2582  	return si_common_hw_init(adev);
2583  }
2584  
2585  static bool si_common_is_idle(void *handle)
2586  {
2587  	return true;
2588  }
2589  
2590  static int si_common_wait_for_idle(void *handle)
2591  {
2592  	return 0;
2593  }
2594  
2595  static int si_common_soft_reset(void *handle)
2596  {
2597  	return 0;
2598  }
2599  
2600  static int si_common_set_clockgating_state(void *handle,
2601  					    enum amd_clockgating_state state)
2602  {
2603  	return 0;
2604  }
2605  
2606  static int si_common_set_powergating_state(void *handle,
2607  					    enum amd_powergating_state state)
2608  {
2609  	return 0;
2610  }
2611  
2612  static const struct amd_ip_funcs si_common_ip_funcs = {
2613  	.name = "si_common",
2614  	.early_init = si_common_early_init,
2615  	.late_init = NULL,
2616  	.sw_init = si_common_sw_init,
2617  	.sw_fini = si_common_sw_fini,
2618  	.hw_init = si_common_hw_init,
2619  	.hw_fini = si_common_hw_fini,
2620  	.suspend = si_common_suspend,
2621  	.resume = si_common_resume,
2622  	.is_idle = si_common_is_idle,
2623  	.wait_for_idle = si_common_wait_for_idle,
2624  	.soft_reset = si_common_soft_reset,
2625  	.set_clockgating_state = si_common_set_clockgating_state,
2626  	.set_powergating_state = si_common_set_powergating_state,
2627  };
2628  
2629  static const struct amdgpu_ip_block_version si_common_ip_block =
2630  {
2631  	.type = AMD_IP_BLOCK_TYPE_COMMON,
2632  	.major = 1,
2633  	.minor = 0,
2634  	.rev = 0,
2635  	.funcs = &si_common_ip_funcs,
2636  };
2637  
2638  int si_set_ip_blocks(struct amdgpu_device *adev)
2639  {
2640  	switch (adev->asic_type) {
2641  	case CHIP_VERDE:
2642  	case CHIP_TAHITI:
2643  	case CHIP_PITCAIRN:
2644  		amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2645  		amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2646  		amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2647  		amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2648  		amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2649  		amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2650  		if (adev->enable_virtual_display)
2651  			amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2652  #if defined(CONFIG_DRM_AMD_DC) && defined(CONFIG_DRM_AMD_DC_SI)
2653  		else if (amdgpu_device_has_dc_support(adev))
2654  			amdgpu_device_ip_block_add(adev, &dm_ip_block);
2655  #endif
2656  		else
2657  			amdgpu_device_ip_block_add(adev, &dce_v6_0_ip_block);
2658  		amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block);
2659  		/* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */
2660  		break;
2661  	case CHIP_OLAND:
2662  		amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2663  		amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2664  		amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2665  		amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2666  		amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2667  		amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2668  		if (adev->enable_virtual_display)
2669  			amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2670  #if defined(CONFIG_DRM_AMD_DC) && defined(CONFIG_DRM_AMD_DC_SI)
2671  		else if (amdgpu_device_has_dc_support(adev))
2672  			amdgpu_device_ip_block_add(adev, &dm_ip_block);
2673  #endif
2674  		else
2675  			amdgpu_device_ip_block_add(adev, &dce_v6_4_ip_block);
2676  		amdgpu_device_ip_block_add(adev, &uvd_v3_1_ip_block);
2677  		/* amdgpu_device_ip_block_add(adev, &vce_v1_0_ip_block); */
2678  		break;
2679  	case CHIP_HAINAN:
2680  		amdgpu_device_ip_block_add(adev, &si_common_ip_block);
2681  		amdgpu_device_ip_block_add(adev, &gmc_v6_0_ip_block);
2682  		amdgpu_device_ip_block_add(adev, &si_ih_ip_block);
2683  		amdgpu_device_ip_block_add(adev, &gfx_v6_0_ip_block);
2684  		amdgpu_device_ip_block_add(adev, &si_dma_ip_block);
2685  		amdgpu_device_ip_block_add(adev, &si_smu_ip_block);
2686  		if (adev->enable_virtual_display)
2687  			amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
2688  		break;
2689  	default:
2690  		BUG();
2691  	}
2692  	return 0;
2693  }
2694  
2695