Lines Matching 0x00000033
36 case 0x50: /* it exists, but only has bit 31, not the dividers.. */ in read_div()
37 case 0x84: in read_div()
38 case 0x86: in read_div()
39 case 0x98: in read_div()
40 case 0xa0: in read_div()
41 return nvkm_rd32(device, 0x004700); in read_div()
42 case 0x92: in read_div()
43 case 0x94: in read_div()
44 case 0x96: in read_div()
45 return nvkm_rd32(device, 0x004800); in read_div()
47 return 0x00000000; in read_div()
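Note: read_div() only selects the chipset-appropriate divider register (0x004700 or 0x004800) and returns it raw; the callers in nv50_clk_read() further down mask out the 3-bit post-divider fields themselves. A minimal sketch of that pattern, reusing the masks visible at lines 256 and 304:

    /* sketch only: field extraction as done by the callers below */
    u32 divs = read_div(clk);
    int vdec_p = (divs & 0x00000700) >> 8;   /* vdec post-divider, line 256 */
    int dom6_p = (divs & 0x00000007) >> 0;   /* dom6 post-divider, line 304 */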
57 u32 rsel = nvkm_rd32(device, 0x00e18c); in read_pll_src()
61 case 0x50: in read_pll_src()
62 case 0xa0: in read_pll_src()
64 case 0x4020: in read_pll_src()
65 case 0x4028: id = !!(rsel & 0x00000004); break; in read_pll_src()
66 case 0x4008: id = !!(rsel & 0x00000008); break; in read_pll_src()
67 case 0x4030: id = 0; break; in read_pll_src()
70 return 0; in read_pll_src()
73 coef = nvkm_rd32(device, 0x00e81c + (id * 0x0c)); in read_pll_src()
74 ref *= (coef & 0x01000000) ? 2 : 4; in read_pll_src()
75 P = (coef & 0x00070000) >> 16; in read_pll_src()
76 N = ((coef & 0x0000ff00) >> 8) + 1; in read_pll_src()
77 M = ((coef & 0x000000ff) >> 0) + 1; in read_pll_src()
79 case 0x84: in read_pll_src()
80 case 0x86: in read_pll_src()
81 case 0x92: in read_pll_src()
82 coef = nvkm_rd32(device, 0x00e81c); in read_pll_src()
83 P = (coef & 0x00070000) >> 16; in read_pll_src()
84 N = (coef & 0x0000ff00) >> 8; in read_pll_src()
85 M = (coef & 0x000000ff) >> 0; in read_pll_src()
87 case 0x94: in read_pll_src()
88 case 0x96: in read_pll_src()
89 case 0x98: in read_pll_src()
90 rsel = nvkm_rd32(device, 0x00c050); in read_pll_src()
92 case 0x4020: rsel = (rsel & 0x00000003) >> 0; break; in read_pll_src()
93 case 0x4008: rsel = (rsel & 0x0000000c) >> 2; break; in read_pll_src()
94 case 0x4028: rsel = (rsel & 0x00001800) >> 11; break; in read_pll_src()
95 case 0x4030: rsel = 3; break; in read_pll_src()
98 return 0; in read_pll_src()
102 case 0: id = 1; break; in read_pll_src()
105 case 3: id = 0; break; in read_pll_src()
108 coef = nvkm_rd32(device, 0x00e81c + (id * 0x28)); in read_pll_src()
109 P = (nvkm_rd32(device, 0x00e824 + (id * 0x28)) >> 16) & 7; in read_pll_src()
110 P += (coef & 0x00070000) >> 16; in read_pll_src()
111 N = (coef & 0x0000ff00) >> 8; in read_pll_src()
112 M = (coef & 0x000000ff) >> 0; in read_pll_src()
121 return 0; in read_pll_src()
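Note: the matched lines of read_pll_src() stop at the N/M/P field decodes and a bare return 0 (line 121); the main frequency computation itself is not among them. Assuming the conventional single-stage PLL formula implied by those fields, the tail would be roughly:

    /* sketch, assuming ref * N / M with post-divider P; the actual
     * return statement is outside the matched lines */
    if (M)
        return (ref * N / M) >> P;
    return 0;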
129 u32 src, mast = nvkm_rd32(device, 0x00c040); in read_pll_ref()
132 case 0x004028: in read_pll_ref()
133 src = !!(mast & 0x00200000); in read_pll_ref()
135 case 0x004020: in read_pll_ref()
136 src = !!(mast & 0x00400000); in read_pll_ref()
138 case 0x004008: in read_pll_ref()
139 src = !!(mast & 0x00010000); in read_pll_ref()
141 case 0x004030: in read_pll_ref()
142 src = !!(mast & 0x02000000); in read_pll_ref()
144 case 0x00e810: in read_pll_ref()
148 return 0; in read_pll_ref()
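Note: read_pll_ref() only picks one bit of mast (0x00c040) per PLL base in the lines above; how that bit is used is not shown. A plausible continuation, stated here as an assumption, is that a set bit selects the href reference while a clear bit defers to read_pll_src():

    /* assumed tail of read_pll_ref(), not part of the matched lines */
    if (src)
        return nvkm_clk_read(&clk->base, nv_clk_src_href);
    return read_pll_src(clk, base);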
161 u32 mast = nvkm_rd32(device, 0x00c040); in read_pll()
162 u32 ctrl = nvkm_rd32(device, base + 0); in read_pll()
165 u32 freq = 0; in read_pll()
168 if (base == 0x004028 && (mast & 0x00100000)) { in read_pll()
170 if (device->chipset != 0xa0) in read_pll()
174 N2 = (coef & 0xff000000) >> 24; in read_pll()
175 M2 = (coef & 0x00ff0000) >> 16; in read_pll()
176 N1 = (coef & 0x0000ff00) >> 8; in read_pll()
177 M1 = (coef & 0x000000ff); in read_pll()
178 if ((ctrl & 0x80000000) && M1) { in read_pll()
180 if ((ctrl & 0x40000100) == 0x40000000) { in read_pll()
184 freq = 0; in read_pll()
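Note: lines 174-184 decode a two-stage coefficient word: with ctrl bit 31 set and M1 non-zero the first stage yields ref * N1 / M1, and when (ctrl & 0x40000100) == 0x40000000 a second stage scales that by N2 / M2, collapsing to 0 if M2 is 0 (line 184). A sketch of that arithmetic, assuming the conventional formula:

    /* sketch of the frequency math implied by the fields above */
    if ((ctrl & 0x80000000) && M1) {
        freq = ref * N1 / M1;                     /* first VCO stage */
        if ((ctrl & 0x40000100) == 0x40000000) {
            if (M2)
                freq = freq * N2 / M2;            /* second stage */
            else
                freq = 0;                         /* line 184 */
        }
    }
    return freq;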
197 u32 mast = nvkm_rd32(device, 0x00c040); in nv50_clk_read()
198 u32 P = 0; in nv50_clk_read()
212 switch (mast & 0x30000000) { in nv50_clk_read()
213 case 0x00000000: return nvkm_clk_read(&clk->base, nv_clk_src_href); in nv50_clk_read()
214 case 0x10000000: break; in nv50_clk_read()
215 case 0x20000000: /* !0x50 */ in nv50_clk_read()
216 case 0x30000000: return nvkm_clk_read(&clk->base, nv_clk_src_hclk); in nv50_clk_read()
220 if (!(mast & 0x00100000)) in nv50_clk_read()
221 P = (nvkm_rd32(device, 0x004028) & 0x00070000) >> 16; in nv50_clk_read()
222 switch (mast & 0x00000003) { in nv50_clk_read()
223 case 0x00000000: return nvkm_clk_read(&clk->base, nv_clk_src_crystal) >> P; in nv50_clk_read()
224 case 0x00000001: return nvkm_clk_read(&clk->base, nv_clk_src_dom6); in nv50_clk_read()
225 case 0x00000002: return read_pll(clk, 0x004020) >> P; in nv50_clk_read()
226 case 0x00000003: return read_pll(clk, 0x004028) >> P; in nv50_clk_read()
230 P = (nvkm_rd32(device, 0x004020) & 0x00070000) >> 16; in nv50_clk_read()
231 switch (mast & 0x00000030) { in nv50_clk_read()
232 case 0x00000000: in nv50_clk_read()
233 if (mast & 0x00000080) in nv50_clk_read()
236 case 0x00000010: break; in nv50_clk_read()
237 case 0x00000020: return read_pll(clk, 0x004028) >> P; in nv50_clk_read()
238 case 0x00000030: return read_pll(clk, 0x004020) >> P; in nv50_clk_read()
242 P = (nvkm_rd32(device, 0x004008) & 0x00070000) >> 16; in nv50_clk_read()
243 if (nvkm_rd32(device, 0x004008) & 0x00000200) { in nv50_clk_read()
244 switch (mast & 0x0000c000) { in nv50_clk_read()
245 case 0x00000000: in nv50_clk_read()
247 case 0x00008000: in nv50_clk_read()
248 case 0x0000c000: in nv50_clk_read()
252 return read_pll(clk, 0x004008) >> P; in nv50_clk_read()
256 P = (read_div(clk) & 0x00000700) >> 8; in nv50_clk_read()
258 case 0x84: in nv50_clk_read()
259 case 0x86: in nv50_clk_read()
260 case 0x92: in nv50_clk_read()
261 case 0x94: in nv50_clk_read()
262 case 0x96: in nv50_clk_read()
263 case 0xa0: in nv50_clk_read()
264 switch (mast & 0x00000c00) { in nv50_clk_read()
265 case 0x00000000: in nv50_clk_read()
266 if (device->chipset == 0xa0) /* wtf?? */ in nv50_clk_read()
269 case 0x00000400: in nv50_clk_read()
270 return 0; in nv50_clk_read()
271 case 0x00000800: in nv50_clk_read()
272 if (mast & 0x01000000) in nv50_clk_read()
273 return read_pll(clk, 0x004028) >> P; in nv50_clk_read()
274 return read_pll(clk, 0x004030) >> P; in nv50_clk_read()
275 case 0x00000c00: in nv50_clk_read()
279 case 0x98: in nv50_clk_read()
280 switch (mast & 0x00000c00) { in nv50_clk_read()
281 case 0x00000000: in nv50_clk_read()
283 case 0x00000400: in nv50_clk_read()
284 return 0; in nv50_clk_read()
285 case 0x00000800: in nv50_clk_read()
287 case 0x00000c00: in nv50_clk_read()
295 case 0x50: in nv50_clk_read()
296 case 0xa0: in nv50_clk_read()
297 return read_pll(clk, 0x00e810) >> 2; in nv50_clk_read()
298 case 0x84: in nv50_clk_read()
299 case 0x86: in nv50_clk_read()
300 case 0x92: in nv50_clk_read()
301 case 0x94: in nv50_clk_read()
302 case 0x96: in nv50_clk_read()
303 case 0x98: in nv50_clk_read()
304 P = (read_div(clk) & 0x00000007) >> 0; in nv50_clk_read()
305 switch (mast & 0x0c000000) { in nv50_clk_read()
306 case 0x00000000: return nvkm_clk_read(&clk->base, nv_clk_src_href); in nv50_clk_read()
307 case 0x04000000: break; in nv50_clk_read()
308 case 0x08000000: return nvkm_clk_read(&clk->base, nv_clk_src_hclk); in nv50_clk_read()
309 case 0x0c000000: in nv50_clk_read()
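Note: for readability, the mast (0x00c040) bit fields decoded throughout nv50_clk_read() above can be summarised as follows; the names are descriptive only and are not taken from the driver.

    /* descriptive summary of the 0x00c040 fields used above (names invented) */
    #define MAST_HOST_SRC    0x30000000   /* href vs. hclk selection, lines 212-216 */
    #define MAST_CORE_SRC    0x00000003   /* crystal / dom6 / SPLL / NVPLL, lines 222-226 */
    #define MAST_CORE_NODIV  0x00100000   /* bypass 0x004028 post-divider, line 220 */
    #define MAST_SHADER_SRC  0x00000030   /* shader clock source, lines 231-238 */
    #define MAST_MEM_SRC     0x0000c000   /* memory bypass source, lines 244-248 */
    #define MAST_VDEC_SRC    0x00000c00   /* vdec clock source, lines 264-287 */
    #define MAST_DOM6_SRC    0x0c000000   /* dom6 clock source, lines 305-309 */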
334 return 0; in calc_pll()
336 pll.vco2.max_freq = 0; in calc_pll()
339 return 0; in calc_pll()
348 for (*div = 0; *div <= 7; (*div)++) { in calc_div()
350 clk1 = clk0 << (*div ? 1 : 0); in calc_div()
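Note: calc_div() (lines 348-350) searches for a power-of-two post-divider by halving the source clock until it drops to or below the target, keeping the next-higher candidate (one fewer halving) in clk1. A self-contained sketch of that search, assuming the tail returns whichever of the two candidates lands closer to the target:

    /* sketch of the halving search; the tail behaviour is an assumption */
    static u32 calc_div_sketch(u32 src, u32 target, int *div)
    {
        u32 clk0 = src, clk1 = src;
        for (*div = 0; *div <= 7; (*div)++) {
            if (clk0 <= target) {
                clk1 = clk0 << (*div ? 1 : 0);   /* one fewer halving, line 350 */
                break;
            }
            clk0 >>= 1;
        }
        if (target - clk0 <= clk1 - target)      /* pick the closer candidate */
            return clk0;
        return clk1;
    }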
379 u32 mastm = 0, mastv = 0; in nv50_clk_calc()
380 u32 divsm = 0, divsv = 0; in nv50_clk_calc()
389 clk_wr32(hwsq, fifo, 0x00000001); /* block fifo */ in nv50_clk_calc()
391 clk_setf(hwsq, 0x10, 0x00); /* disable fb */ in nv50_clk_calc()
392 clk_wait(hwsq, 0x00, 0x01); /* wait for fb disabled */ in nv50_clk_calc()
403 if (device->chipset != 0x98) in nv50_clk_calc()
404 out = read_pll(clk, 0x004030); in nv50_clk_calc()
411 if (device->chipset != 0x98) in nv50_clk_calc()
412 mastv |= 0x00000c00; in nv50_clk_calc()
415 mastv |= 0x00000800; in nv50_clk_calc()
419 mastm |= 0x00000c00; in nv50_clk_calc()
420 divsm |= 0x00000700; in nv50_clk_calc()
428 mastv |= 0x00000000; in nv50_clk_calc()
431 mastv |= 0x08000000; in nv50_clk_calc()
436 mastv |= 0x0c000000; in nv50_clk_calc()
440 mastm |= 0x0c000000; in nv50_clk_calc()
441 divsm |= 0x00000007; in nv50_clk_calc()
447 clk_mask(hwsq, mast, mastm, 0x00000000); in nv50_clk_calc()
454 if (device->chipset < 0x92) in nv50_clk_calc()
455 clk_mask(hwsq, mast, 0x001000b0, 0x00100080); in nv50_clk_calc()
457 clk_mask(hwsq, mast, 0x000000b3, 0x00000081); in nv50_clk_calc()
460 freq = calc_pll(clk, 0x4028, core, &N, &M, &P1); in nv50_clk_calc()
461 if (freq == 0) in nv50_clk_calc()
464 clk_mask(hwsq, nvpll[0], 0xc03f0100, in nv50_clk_calc()
465 0x80000000 | (P1 << 19) | (P1 << 16)); in nv50_clk_calc()
466 clk_mask(hwsq, nvpll[1], 0x0000ffff, (N << 8) | M); in nv50_clk_calc()
475 clk_mask(hwsq, spll[0], 0xc03f0100, (P1 << 19) | (P1 << 16)); in nv50_clk_calc()
476 clk_mask(hwsq, mast, 0x00100033, 0x00000023); in nv50_clk_calc()
478 freq = calc_pll(clk, 0x4020, shader, &N, &M, &P1); in nv50_clk_calc()
479 if (freq == 0) in nv50_clk_calc()
482 clk_mask(hwsq, spll[0], 0xc03f0100, in nv50_clk_calc()
483 0x80000000 | (P1 << 19) | (P1 << 16)); in nv50_clk_calc()
484 clk_mask(hwsq, spll[1], 0x0000ffff, (N << 8) | M); in nv50_clk_calc()
485 clk_mask(hwsq, mast, 0x00100033, 0x00000033); in nv50_clk_calc()
489 clk_setf(hwsq, 0x10, 0x01); /* enable fb */ in nv50_clk_calc()
490 clk_wait(hwsq, 0x00, 0x00); /* wait for fb enabled */ in nv50_clk_calc()
491 clk_wr32(hwsq, fifo, 0x00000000); /* un-block fifo */ in nv50_clk_calc()
492 return 0; in nv50_clk_calc()
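Note: lines 460-485 program the NVPLL (0x004028) and SPLL (0x004020) register pairs: P1 is written twice into the control word (bits 16-18 and 19-21) together with the enable bit, and N/M go into the coefficient word. A hypothetical helper (not part of the driver) showing how the values are packed; e.g. N = 0x54, M = 0x02, P1 = 1 gives 0x80090000 and 0x00005402:

    /* hypothetical packing helper; masks and values mirror lines 464-466 and 482-485 */
    static void pack_pll(u32 N, u32 M, u32 P1, u32 *ctrl, u32 *coef)
    {
        *ctrl = 0x80000000 | (P1 << 19) | (P1 << 16);  /* written under mask 0xc03f0100 */
        *coef = (N << 8) | M;                          /* written under mask 0x0000ffff */
    }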
523 clk->hwsq.r_fifo = hwsq_reg(0x002504); in nv50_clk_new_()
524 clk->hwsq.r_spll[0] = hwsq_reg(0x004020); in nv50_clk_new_()
525 clk->hwsq.r_spll[1] = hwsq_reg(0x004024); in nv50_clk_new_()
526 clk->hwsq.r_nvpll[0] = hwsq_reg(0x004028); in nv50_clk_new_()
527 clk->hwsq.r_nvpll[1] = hwsq_reg(0x00402c); in nv50_clk_new_()
529 case 0x92: in nv50_clk_new_()
530 case 0x94: in nv50_clk_new_()
531 case 0x96: in nv50_clk_new_()
532 clk->hwsq.r_divs = hwsq_reg(0x004800); in nv50_clk_new_()
535 clk->hwsq.r_divs = hwsq_reg(0x004700); in nv50_clk_new_()
538 clk->hwsq.r_mast = hwsq_reg(0x00c040); in nv50_clk_new_()
539 return 0; in nv50_clk_new_()
549 { nv_clk_src_crystal, 0xff },
550 { nv_clk_src_href , 0xff },
551 { nv_clk_src_core , 0xff, 0, "core", 1000 },
552 { nv_clk_src_shader , 0xff, 0, "shader", 1000 },
553 { nv_clk_src_mem , 0xff, 0, "memory", 1000 },