Lines Matching 0x0000001f
34 struct nvkm_fault_buffer *buffer = fault->buffer[0]; in gv100_fault_buffer_process()
45 const u32 instlo = nvkm_ro32(mem, base + 0x00); in gv100_fault_buffer_process()
46 const u32 insthi = nvkm_ro32(mem, base + 0x04); in gv100_fault_buffer_process()
47 const u32 addrlo = nvkm_ro32(mem, base + 0x08); in gv100_fault_buffer_process()
48 const u32 addrhi = nvkm_ro32(mem, base + 0x0c); in gv100_fault_buffer_process()
49 const u32 timelo = nvkm_ro32(mem, base + 0x10); in gv100_fault_buffer_process()
50 const u32 timehi = nvkm_ro32(mem, base + 0x14); in gv100_fault_buffer_process()
51 const u32 info0 = nvkm_ro32(mem, base + 0x18); in gv100_fault_buffer_process()
52 const u32 info1 = nvkm_ro32(mem, base + 0x1c); in gv100_fault_buffer_process()
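The eight nvkm_ro32() reads above walk a single fault-buffer entry at offsets 0x00 through 0x1c, i.e. one 32-byte record per fault. A minimal layout sketch, assuming the fields are packed exactly as those offsets suggest; the struct name is made up here and is not a driver type:

#include <stdint.h>

/* Illustrative only: one 32-byte fault-buffer entry as implied by the
 * offsets above. Field names mirror the locals in
 * gv100_fault_buffer_process(); the struct itself is not in the driver. */
struct gv100_fault_entry_sketch {
	uint32_t instlo;  /* 0x00: instance pointer, low word */
	uint32_t insthi;  /* 0x04: instance pointer, high word */
	uint32_t addrlo;  /* 0x08: faulting address, low word */
	uint32_t addrhi;  /* 0x0c: faulting address, high word */
	uint32_t timelo;  /* 0x10: timestamp, low word */
	uint32_t timehi;  /* 0x14: timestamp, high word */
	uint32_t info0;   /* 0x18: engine id in the low byte (see the decode below) */
	uint32_t info1;   /* 0x1c: valid/gpc/hub/access/client/reason bitfields */
};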
56 get = 0; in gv100_fault_buffer_process()
62 info.engine = (info0 & 0x000000ff); in gv100_fault_buffer_process()
63 info.valid = (info1 & 0x80000000) >> 31; in gv100_fault_buffer_process()
64 info.gpc = (info1 & 0x1f000000) >> 24; in gv100_fault_buffer_process()
65 info.hub = (info1 & 0x00100000) >> 20; in gv100_fault_buffer_process()
66 info.access = (info1 & 0x000f0000) >> 16; in gv100_fault_buffer_process()
67 info.client = (info1 & 0x00007f00) >> 8; in gv100_fault_buffer_process()
68 info.reason = (info1 & 0x0000001f); in gv100_fault_buffer_process()
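The masks and shifts above amount to a fixed bitfield map of info1. A standalone decode sketch in plain C (outside the kernel; the struct and function names here are illustrative, not driver identifiers):

#include <stdint.h>

/* Illustrative decode of the info1 word, using exactly the masks and
 * shifts from gv100_fault_buffer_process() above. Not driver code. */
struct fault_info1_sketch {
	uint32_t valid;   /* bit 31 */
	uint32_t gpc;     /* bits 28:24 */
	uint32_t hub;     /* bit 20 */
	uint32_t access;  /* bits 19:16 */
	uint32_t client;  /* bits 14:8 */
	uint32_t reason;  /* bits 4:0 */
};

static struct fault_info1_sketch
decode_info1(uint32_t info1)
{
	struct fault_info1_sketch i = {
		.valid  = (info1 & 0x80000000) >> 31,
		.gpc    = (info1 & 0x1f000000) >> 24,
		.hub    = (info1 & 0x00100000) >> 20,
		.access = (info1 & 0x000f0000) >> 16,
		.client = (info1 & 0x00007f00) >> 8,
		.reason = (info1 & 0x0000001f),
	};
	return i;
}

gv100_fault_intr_fault() further down applies the same layout to the MMIO copy of info1.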
79 const u32 intr = buffer->id ? 0x08000000 : 0x20000000; in gv100_fault_buffer_intr()
81 nvkm_mask(device, 0x100a2c, intr, intr); in gv100_fault_buffer_intr()
83 nvkm_mask(device, 0x100a34, intr, intr); in gv100_fault_buffer_intr()
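The per-buffer interrupt bit is selected by buffer id: id 0 maps to 0x20000000 (bit 29) and any other id to 0x08000000 (bit 27), matching the stat tests in gv100_fault_intr() further down. A sketch of the selection, assuming 0x100a2c acts as the interrupt-enable set register and 0x100a34 as the clear register (as the top-level init/fini lines below suggest); the enable parameter, the if/else structure, and the function name are assumptions about context elided from this match list:

/* Illustrative only: route the per-buffer interrupt bit to the set or
 * clear register. Reuses nvkm_mask() from the excerpt; not standalone. */
static void
fault_buffer_intr_sketch(struct nvkm_device *device, int buffer_id, bool enable)
{
	const u32 intr = buffer_id ? 0x08000000 : 0x20000000;

	if (enable)
		nvkm_mask(device, 0x100a2c, intr, intr); /* enable */
	else
		nvkm_mask(device, 0x100a34, intr, intr); /* disable */
}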
90 const u32 foff = buffer->id * 0x14; in gv100_fault_buffer_fini()
91 nvkm_mask(device, 0x100e34 + foff, 0x80000000, 0x00000000); in gv100_fault_buffer_fini()
98 const u32 foff = buffer->id * 0x14; in gv100_fault_buffer_init()
100 nvkm_mask(device, 0x100e34 + foff, 0xc0000000, 0x40000000); in gv100_fault_buffer_init()
101 nvkm_wr32(device, 0x100e28 + foff, upper_32_bits(buffer->addr)); in gv100_fault_buffer_init()
102 nvkm_wr32(device, 0x100e24 + foff, lower_32_bits(buffer->addr)); in gv100_fault_buffer_init()
103 nvkm_mask(device, 0x100e34 + foff, 0x80000000, 0x80000000); in gv100_fault_buffer_init()
110 const u32 foff = buffer->id * 0x14; in gv100_fault_buffer_info()
112 nvkm_mask(device, 0x100e34 + foff, 0x40000000, 0x40000000); in gv100_fault_buffer_info()
114 buffer->entries = nvkm_rd32(device, 0x100e34 + foff) & 0x000fffff; in gv100_fault_buffer_info()
115 buffer->get = 0x100e2c + foff; in gv100_fault_buffer_info()
116 buffer->put = 0x100e30 + foff; in gv100_fault_buffer_info()
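The fini/init/info callbacks above imply a per-buffer register block with a 0x14-byte stride (foff = id * 0x14) starting at 0x100e24. A map sketch with roles inferred from how each register is accessed here; the macro names are labels chosen for illustration, not driver identifiers:

/* Illustrative per-buffer register map, stride 0x14. */
#define GV100_FAULT_BUF_ADDR_LO(i) (0x100e24 + (i) * 0x14) /* buffer address, low 32 bits */
#define GV100_FAULT_BUF_ADDR_HI(i) (0x100e28 + (i) * 0x14) /* buffer address, high 32 bits */
#define GV100_FAULT_BUF_GET(i)     (0x100e2c + (i) * 0x14) /* read pointer (buffer->get) */
#define GV100_FAULT_BUF_PUT(i)     (0x100e30 + (i) * 0x14) /* write pointer (buffer->put) */
#define GV100_FAULT_BUF_CTRL(i)    (0x100e34 + (i) * 0x14) /* bit 31 enable, bit 30 set by init/info, bits 19:0 entry count */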
134 const u32 addrlo = nvkm_rd32(device, 0x100e4c); in gv100_fault_intr_fault()
135 const u32 addrhi = nvkm_rd32(device, 0x100e50); in gv100_fault_intr_fault()
136 const u32 info0 = nvkm_rd32(device, 0x100e54); in gv100_fault_intr_fault()
137 const u32 insthi = nvkm_rd32(device, 0x100e58); in gv100_fault_intr_fault()
138 const u32 info1 = nvkm_rd32(device, 0x100e5c); in gv100_fault_intr_fault()
141 info.inst = ((u64)insthi << 32) | (info0 & 0xfffff000); in gv100_fault_intr_fault()
142 info.time = 0; in gv100_fault_intr_fault()
143 info.engine = (info0 & 0x000000ff); in gv100_fault_intr_fault()
144 info.valid = (info1 & 0x80000000) >> 31; in gv100_fault_intr_fault()
145 info.gpc = (info1 & 0x1f000000) >> 24; in gv100_fault_intr_fault()
146 info.hub = (info1 & 0x00100000) >> 20; in gv100_fault_intr_fault()
147 info.access = (info1 & 0x000f0000) >> 16; in gv100_fault_intr_fault()
148 info.client = (info1 & 0x00007f00) >> 8; in gv100_fault_intr_fault()
149 info.reason = (info1 & 0x0000001f); in gv100_fault_intr_fault()
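Here the same fields are read straight from MMIO (0x100e4c through 0x100e5c) rather than from a buffer in memory, and the instance pointer is rebuilt from insthi plus the upper bits of info0 (the low byte of info0 carries the engine id). A standalone sketch of that packing; the function names are illustrative, and the address combination is an assumption by analogy with the addrlo/addrhi reads, since that line is not part of this match list:

#include <stdint.h>

/* Illustrative: rebuild the 64-bit instance pointer exactly as on the
 * info.inst line above; info0 bits 31:12 hold the low part of it. */
static uint64_t
fault_inst_sketch(uint32_t insthi, uint32_t info0)
{
	return ((uint64_t)insthi << 32) | (info0 & 0xfffff000);
}

/* Assumed by analogy; the corresponding driver line is elided here. */
static uint64_t
fault_addr_sketch(uint32_t addrhi, uint32_t addrlo)
{
	return ((uint64_t)addrhi << 32) | addrlo;
}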
159 u32 stat = nvkm_rd32(device, 0x100a20); in gv100_fault_intr()
161 if (stat & 0x80000000) { in gv100_fault_intr()
163 nvkm_wr32(device, 0x100e60, 0x80000000); in gv100_fault_intr()
164 stat &= ~0x80000000; in gv100_fault_intr()
167 if (stat & 0x20000000) { in gv100_fault_intr()
168 if (fault->buffer[0]) { in gv100_fault_intr()
169 nvkm_event_ntfy(&fault->event, 0, NVKM_FAULT_BUFFER_EVENT_PENDING); in gv100_fault_intr()
170 stat &= ~0x20000000; in gv100_fault_intr()
174 if (stat & 0x08000000) { in gv100_fault_intr()
177 stat &= ~0x08000000; in gv100_fault_intr()
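The top-level handler reads one status word at 0x100a20 and clears each source it recognizes: bit 31 is the MMIO-reported fault handled by gv100_fault_intr_fault() and acknowledged by writing 0x80000000 to 0x100e60, bit 29 signals fault buffer 0, and bit 27 presumably signals buffer 1 (its buffer check is elided from this match list). A condensed summary of those masks; the macro names are descriptive labels, not driver identifiers:

/* Illustrative status bits in 0x100a20 as handled above. */
#define FAULT_STAT_INFO_FAULT 0x80000000 /* acked via 0x100e60 */
#define FAULT_STAT_BUFFER0    0x20000000 /* fault buffer id 0 */
#define FAULT_STAT_BUFFER1    0x08000000 /* presumably fault buffer id 1 */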
192 if (fault->buffer[0]) in gv100_fault_fini()
193 fault->func->buffer.fini(fault->buffer[0]); in gv100_fault_fini()
195 nvkm_mask(fault->subdev.device, 0x100a34, 0x80000000, 0x80000000); in gv100_fault_fini()
201 nvkm_mask(fault->subdev.device, 0x100a2c, 0x80000000, 0x80000000); in gv100_fault_init()
202 fault->func->buffer.init(fault->buffer[0]); in gv100_fault_init()
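Taken together with the buffer paths above, 0x100a2c appears to act as the interrupt-enable set register and 0x100a34 as the matching clear register: init sets bit 31 through 0x100a2c, fini sets the same bit through 0x100a34, and the per-buffer bits 29/27 go through the same pair. A one-line summary as labels (chosen here, not driver identifiers):

/* Illustrative: the enable set/clear pair as used throughout this file. */
#define FAULT_INTR_EN_SET 0x100a2c /* bit 31 top level, bits 29/27 per buffer */
#define FAULT_INTR_EN_CLR 0x100a34 /* same bit positions, disables the source */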
209 nvkm_event_ntfy_add(&fault->event, 0, NVKM_FAULT_BUFFER_EVENT_PENDING, true, in gv100_fault_oneinit()
211 return 0; in gv100_fault_oneinit()
233 .user = { { 0, 0, VOLTA_FAULT_BUFFER_A }, 1 },
245 return 0; in gv100_fault_new()