
Searched refs:gr (results 1 – 25 of 116), sorted by relevance


/linux/drivers/gpu/drm/nouveau/nvkm/engine/gr/
base.c
31 struct nvkm_gr *gr = device->gr; in nvkm_gr_ctxsw_inst() local
32 if (gr && gr->func->ctxsw.inst) in nvkm_gr_ctxsw_inst()
33 return gr->func->ctxsw.inst(gr); in nvkm_gr_ctxsw_inst()
40 struct nvkm_gr *gr = device->gr; in nvkm_gr_ctxsw_resume() local
41 if (gr && gr->func->ctxsw.resume) in nvkm_gr_ctxsw_resume()
42 return gr->func->ctxsw.resume(gr); in nvkm_gr_ctxsw_resume()
49 struct nvkm_gr *gr = device->gr; in nvkm_gr_ctxsw_pause() local
50 if (gr && gr->func->ctxsw.pause) in nvkm_gr_ctxsw_pause()
51 return gr->func->ctxsw.pause(gr); in nvkm_gr_ctxsw_pause()
58 struct nvkm_gr *gr = nvkm_gr(engine); in nvkm_gr_chsw_load() local
[all …]
gf100.c
49 gf100_gr_zbc_clear_color(struct gf100_gr *gr, int zbc) in gf100_gr_zbc_clear_color() argument
51 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_gr_zbc_clear_color()
52 if (gr->zbc_color[zbc].format) { in gf100_gr_zbc_clear_color()
53 nvkm_wr32(device, 0x405804, gr->zbc_color[zbc].ds[0]); in gf100_gr_zbc_clear_color()
54 nvkm_wr32(device, 0x405808, gr->zbc_color[zbc].ds[1]); in gf100_gr_zbc_clear_color()
55 nvkm_wr32(device, 0x40580c, gr->zbc_color[zbc].ds[2]); in gf100_gr_zbc_clear_color()
56 nvkm_wr32(device, 0x405810, gr->zbc_color[zbc].ds[3]); in gf100_gr_zbc_clear_color()
58 nvkm_wr32(device, 0x405814, gr->zbc_color[zbc].format); in gf100_gr_zbc_clear_color()
64 gf100_gr_zbc_color_get(struct gf100_gr *gr, int format, in gf100_gr_zbc_color_get() argument
67 struct nvkm_ltc *ltc = gr->base.engine.subdev.device->ltc; in gf100_gr_zbc_color_get()
[all …]
ctxgf100.c
998 nvkm_wr32(chan->gr->base.engine.subdev.device, addr, data); in gf100_grctx_patch_wr32()
1007 gf100_grctx_generate_r419cb8(struct gf100_gr *gr) in gf100_grctx_generate_r419cb8() argument
1009 struct nvkm_device *device = gr->base.engine.subdev.device; in gf100_grctx_generate_r419cb8()
1034 struct gf100_gr *gr = chan->gr; in gf100_grctx_generate_attrib() local
1035 const struct gf100_grctx_func *grctx = gr->func->grctx; in gf100_grctx_generate_attrib()
1042 for (gpc = 0; gpc < gr->gpc_nr; gpc++) { in gf100_grctx_generate_attrib()
1043 for (tpc = 0; tpc < gr->tpc_nr[gpc]; tpc++) { in gf100_grctx_generate_attrib()
1060 gf100_grctx_generate_attrib_cb_size(struct gf100_gr *gr) in gf100_grctx_generate_attrib_cb_size() argument
1062 const struct gf100_grctx_func *grctx = gr->func->grctx; in gf100_grctx_generate_attrib_cb_size()
1064 return 0x20 * (grctx->attrib_nr_max + grctx->alpha_nr_max) * gr->tpc_total; in gf100_grctx_generate_attrib_cb_size()
[all …]
gm200.c
35 gm200_gr_nofw(struct gf100_gr *gr, int ver, const struct gf100_gr_fwif *fwif) in gm200_gr_nofw() argument
37 nvkm_warn(&gr->base.engine.subdev, "firmware unavailable\n"); in gm200_gr_nofw()
92 gm200_gr_rops(struct gf100_gr *gr) in gm200_gr_rops() argument
94 return nvkm_rd32(gr->base.engine.subdev.device, 0x12006c); in gm200_gr_rops()
98 gm200_gr_init_ds_hww_esr_2(struct gf100_gr *gr) in gm200_gr_init_ds_hww_esr_2() argument
100 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_gr_init_ds_hww_esr_2()
106 gm200_gr_init_num_active_ltcs(struct gf100_gr *gr) in gm200_gr_init_num_active_ltcs() argument
108 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_gr_init_num_active_ltcs()
114 gm200_gr_init_gpc_mmu(struct gf100_gr *gr) in gm200_gr_init_gpc_mmu() argument
116 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_gr_init_gpc_mmu()
[all …]
gk20a.c
156 gk20a_gr_wait_mem_scrubbing(struct gf100_gr *gr) in gk20a_gr_wait_mem_scrubbing() argument
158 struct nvkm_subdev *subdev = &gr->base.engine.subdev; in gk20a_gr_wait_mem_scrubbing()
181 gk20a_gr_set_hww_esr_report_mask(struct gf100_gr *gr) in gk20a_gr_set_hww_esr_report_mask() argument
183 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_set_hww_esr_report_mask()
189 gk20a_gr_init(struct gf100_gr *gr) in gk20a_gr_init() argument
191 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_gr_init()
197 gf100_gr_mmio(gr, gr->sw_nonctx); in gk20a_gr_init()
199 ret = gk20a_gr_wait_mem_scrubbing(gr); in gk20a_gr_init()
203 ret = gf100_gr_wait_idle(gr); in gk20a_gr_init()
208 if (gr->func->init_gpc_mmu) in gk20a_gr_init()
[all …]
ctxgv100.c
64 struct gf100_gr *gr = chan->gr; in gv100_grctx_generate_attrib() local
65 const struct gf100_grctx_func *grctx = gr->func->grctx; in gv100_grctx_generate_attrib()
70 u32 size = grctx->alpha_nr_max * gr->tpc_total; in gv100_grctx_generate_attrib()
79 for (gpc = 0; gpc < gr->gpc_nr; gpc++) { in gv100_grctx_generate_attrib()
80 for (ppc = 0; ppc < gr->func->ppc_nr; ppc++, n++) { in gv100_grctx_generate_attrib()
81 const u32 as = alpha * gr->ppc_tpc_nr[gpc][ppc]; in gv100_grctx_generate_attrib()
82 const u32 bs = attrib * gr->ppc_tpc_max; in gv100_grctx_generate_attrib()
83 const u32 gs = gfxp * gr->ppc_tpc_max; in gv100_grctx_generate_attrib()
87 if (!(gr->ppc_mask[gpc] & (1 << ppc))) in gv100_grctx_generate_attrib()
96 ao += grctx->alpha_nr_max * gr->ppc_tpc_nr[gpc][ppc]; in gv100_grctx_generate_attrib()
[all …]
ctxgm20b.c
27 struct gf100_gr *gr = chan->gr; in gm20b_grctx_generate_main() local
28 struct nvkm_device *device = gr->base.engine.subdev.device; in gm20b_grctx_generate_main()
29 const struct gf100_grctx_func *grctx = gr->func->grctx; in gm20b_grctx_generate_main()
33 gf100_gr_mmio(gr, gr->sw_ctx); in gm20b_grctx_generate_main()
35 gf100_gr_wait_idle(gr); in gm20b_grctx_generate_main()
39 grctx->attrib_cb(chan, chan->attrib_cb->addr, grctx->attrib_cb_size(gr)); in gm20b_grctx_generate_main()
42 grctx->unkn(gr); in gm20b_grctx_generate_main()
44 gf100_grctx_generate_floorsweep(gr); in gm20b_grctx_generate_main()
49 nvkm_wr32(device, 0x405b00, (gr->tpc_total << 8) | gr->gpc_nr); in gm20b_grctx_generate_main()
53 for (tmp = 0, i = 0; i < gr->gpc_nr; i++) in gm20b_grctx_generate_main()
[all …]
nv10.c
401 struct nv10_gr *gr; member
414 #define PIPE_SAVE(gr, state, addr) \ argument
422 #define PIPE_RESTORE(gr, state, addr) \ argument
434 struct nvkm_gr *gr = &chan->gr->base; in nv17_gr_mthd_lma_window() local
445 nv04_gr_idle(gr); in nv17_gr_mthd_lma_window()
452 nv04_gr_idle(gr); in nv17_gr_mthd_lma_window()
462 nv04_gr_idle(gr); in nv17_gr_mthd_lma_window()
485 nv04_gr_idle(gr); in nv17_gr_mthd_lma_window()
500 nv04_gr_idle(gr); in nv17_gr_mthd_lma_window()
507 struct nvkm_gr *gr = &chan->gr->base; in nv17_gr_mthd_lma_enable() local
[all …]
ctxgm200.c
31 gm200_grctx_generate_r419a3c(struct gf100_gr *gr) in gm200_grctx_generate_r419a3c() argument
33 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_grctx_generate_r419a3c()
38 gm200_grctx_generate_r418e94(struct gf100_gr *gr) in gm200_grctx_generate_r418e94() argument
40 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_grctx_generate_r418e94()
46 gm200_grctx_generate_smid_config(struct gf100_gr *gr) in gm200_grctx_generate_smid_config() argument
48 struct nvkm_device *device = gr->base.engine.subdev.device; in gm200_grctx_generate_smid_config()
49 const u32 dist_nr = DIV_ROUND_UP(gr->tpc_total, 4); in gm200_grctx_generate_smid_config()
54 for (sm = 0; sm < gr->sm_nr; sm++) { in gm200_grctx_generate_smid_config()
55 const u8 gpc = gr->sm[sm].gpc; in gm200_grctx_generate_smid_config()
56 const u8 tpc = gr->sm[sm].tpc; in gm200_grctx_generate_smid_config()
[all …]
gv100.c
28 gv100_gr_trap_sm(struct gf100_gr *gr, int gpc, int tpc, int sm) in gv100_gr_trap_sm() argument
30 struct nvkm_subdev *subdev = &gr->base.engine.subdev; in gv100_gr_trap_sm()
49 gv100_gr_trap_mp(struct gf100_gr *gr, int gpc, int tpc) in gv100_gr_trap_mp() argument
51 gv100_gr_trap_sm(gr, gpc, tpc, 0); in gv100_gr_trap_mp()
52 gv100_gr_trap_sm(gr, gpc, tpc, 1); in gv100_gr_trap_mp()
56 gv100_gr_init_4188a4(struct gf100_gr *gr) in gv100_gr_init_4188a4() argument
58 struct nvkm_device *device = gr->base.engine.subdev.device; in gv100_gr_init_4188a4()
64 gv100_gr_init_shader_exceptions(struct gf100_gr *gr, int gpc, int tpc) in gv100_gr_init_shader_exceptions() argument
66 struct nvkm_device *device = gr->base.engine.subdev.device; in gv100_gr_init_shader_exceptions()
75 gv100_gr_init_504430(struct gf100_gr *gr, int gpc, int tpc) in gv100_gr_init_504430() argument
[all …]
ctxgk20a.c
30 struct gf100_gr *gr = chan->gr; in gk20a_grctx_generate_main() local
31 struct nvkm_device *device = gr->base.engine.subdev.device; in gk20a_grctx_generate_main()
32 const struct gf100_grctx_func *grctx = gr->func->grctx; in gk20a_grctx_generate_main()
36 gf100_gr_mmio(gr, gr->sw_ctx); in gk20a_grctx_generate_main()
38 gf100_gr_wait_idle(gr); in gk20a_grctx_generate_main()
42 grctx->attrib_cb(chan, chan->attrib_cb->addr, grctx->attrib_cb_size(gr)); in gk20a_grctx_generate_main()
45 grctx->unkn(gr); in gk20a_grctx_generate_main()
47 gf100_grctx_generate_floorsweep(gr); in gk20a_grctx_generate_main()
52 nvkm_wr32(device, 0x405b00, (gr->tpc_total << 8) | gr->gpc_nr); in gk20a_grctx_generate_main()
56 gf100_gr_wait_idle(gr); in gk20a_grctx_generate_main()
[all …]
ctxgp100.c
44 struct gf100_gr *gr = chan->gr; in gp100_grctx_generate_attrib() local
45 const struct gf100_grctx_func *grctx = gr->func->grctx; in gp100_grctx_generate_attrib()
49 u32 size = grctx->alpha_nr_max * gr->tpc_total; in gp100_grctx_generate_attrib()
58 for (gpc = 0; gpc < gr->gpc_nr; gpc++) { in gp100_grctx_generate_attrib()
59 for (ppc = 0; ppc < gr->func->ppc_nr; ppc++, n++) { in gp100_grctx_generate_attrib()
60 const u32 as = alpha * gr->ppc_tpc_nr[gpc][ppc]; in gp100_grctx_generate_attrib()
61 const u32 bs = attrib * gr->ppc_tpc_max; in gp100_grctx_generate_attrib()
65 if (!(gr->ppc_mask[gpc] & (1 << ppc))) in gp100_grctx_generate_attrib()
71 bo += grctx->attrib_nr_max * gr->ppc_tpc_max; in gp100_grctx_generate_attrib()
74 ao += grctx->alpha_nr_max * gr->ppc_tpc_nr[gpc][ppc]; in gp100_grctx_generate_attrib()
[all …]
gp100.c
33 gp100_gr_zbc_clear_color(struct gf100_gr *gr, int zbc) in gp100_gr_zbc_clear_color() argument
35 struct nvkm_device *device = gr->base.engine.subdev.device; in gp100_gr_zbc_clear_color()
39 if (gr->zbc_color[zbc].format) { in gp100_gr_zbc_clear_color()
40 nvkm_wr32(device, 0x418010 + zoff, gr->zbc_color[zbc].ds[0]); in gp100_gr_zbc_clear_color()
41 nvkm_wr32(device, 0x41804c + zoff, gr->zbc_color[zbc].ds[1]); in gp100_gr_zbc_clear_color()
42 nvkm_wr32(device, 0x418088 + zoff, gr->zbc_color[zbc].ds[2]); in gp100_gr_zbc_clear_color()
43 nvkm_wr32(device, 0x4180c4 + zoff, gr->zbc_color[zbc].ds[3]); in gp100_gr_zbc_clear_color()
48 gr->zbc_color[zbc].format << ((znum % 4) * 7)); in gp100_gr_zbc_clear_color()
52 gp100_gr_zbc_clear_depth(struct gf100_gr *gr, int zbc) in gp100_gr_zbc_clear_depth() argument
54 struct nvkm_device *device = gr->base.engine.subdev.device; in gp100_gr_zbc_clear_depth()
[all …]
ctxgf117.c
188 gf117_grctx_generate_dist_skip_table(struct gf100_gr *gr) in gf117_grctx_generate_dist_skip_table() argument
190 struct nvkm_device *device = gr->base.engine.subdev.device; in gf117_grctx_generate_dist_skip_table()
198 gf117_grctx_generate_rop_mapping(struct gf100_gr *gr) in gf117_grctx_generate_rop_mapping() argument
200 struct nvkm_device *device = gr->base.engine.subdev.device; in gf117_grctx_generate_rop_mapping()
207 data[i / 6] |= (gr->tile[i] & 0x07) << ((i % 6) * 5); in gf117_grctx_generate_rop_mapping()
211 ntpcv = gr->tpc_total; in gf117_grctx_generate_rop_mapping()
224 nvkm_wr32(device, 0x418bb8, (gr->tpc_total << 8) | in gf117_grctx_generate_rop_mapping()
225 gr->screen_tile_row_offset); in gf117_grctx_generate_rop_mapping()
230 nvkm_wr32(device, 0x41bfd0, (gr->tpc_total << 8) | in gf117_grctx_generate_rop_mapping()
231 gr->screen_tile_row_offset | data2[0]); in gf117_grctx_generate_rop_mapping()
[all …]
ctxgp102.c
33 gp102_grctx_generate_r408840(struct gf100_gr *gr) in gp102_grctx_generate_r408840() argument
35 struct nvkm_device *device = gr->base.engine.subdev.device; in gp102_grctx_generate_r408840()
42 struct gf100_gr *gr = chan->gr; in gp102_grctx_generate_attrib() local
43 const struct gf100_grctx_func *grctx = gr->func->grctx; in gp102_grctx_generate_attrib()
48 u32 size = grctx->alpha_nr_max * gr->tpc_total; in gp102_grctx_generate_attrib()
57 for (gpc = 0; gpc < gr->gpc_nr; gpc++) { in gp102_grctx_generate_attrib()
58 for (ppc = 0; ppc < gr->func->ppc_nr; ppc++, n++) { in gp102_grctx_generate_attrib()
59 const u32 as = alpha * gr->ppc_tpc_nr[gpc][ppc]; in gp102_grctx_generate_attrib()
60 const u32 bs = attrib * gr->ppc_tpc_max; in gp102_grctx_generate_attrib()
61 const u32 gs = gfxp * gr->ppc_tpc_max; in gp102_grctx_generate_attrib()
[all …]
gp102.c
30 gp102_gr_zbc_clear_stencil(struct gf100_gr *gr, int zbc) in gp102_gr_zbc_clear_stencil() argument
32 struct nvkm_device *device = gr->base.engine.subdev.device; in gp102_gr_zbc_clear_stencil()
36 if (gr->zbc_stencil[zbc].format) in gp102_gr_zbc_clear_stencil()
37 nvkm_wr32(device, 0x41815c + zoff, gr->zbc_stencil[zbc].ds); in gp102_gr_zbc_clear_stencil()
40 gr->zbc_stencil[zbc].format << ((znum % 4) * 7)); in gp102_gr_zbc_clear_stencil()
44 gp102_gr_zbc_stencil_get(struct gf100_gr *gr, int format, in gp102_gr_zbc_stencil_get() argument
47 struct nvkm_ltc *ltc = gr->base.engine.subdev.device->ltc; in gp102_gr_zbc_stencil_get()
51 if (gr->zbc_stencil[i].format) { in gp102_gr_zbc_stencil_get()
52 if (gr->zbc_stencil[i].format != format) in gp102_gr_zbc_stencil_get()
54 if (gr->zbc_stencil[i].ds != ds) in gp102_gr_zbc_stencil_get()
[all …]
ctxgk104.c
849 gk104_grctx_generate_r418800(struct gf100_gr *gr) in gk104_grctx_generate_r418800() argument
851 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_r418800()
866 struct nvkm_device *device = chan->gr->base.engine.subdev.device; in gk104_grctx_generate_patch_ltc()
878 const struct gf100_grctx_func *grctx = chan->gr->func->grctx; in gk104_grctx_generate_bundle()
894 gk104_grctx_generate_unkn(struct gf100_gr *gr) in gk104_grctx_generate_unkn() argument
896 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_unkn()
906 gk104_grctx_generate_r419f78(struct gf100_gr *gr) in gk104_grctx_generate_r419f78() argument
908 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_r419f78()
915 gk104_grctx_generate_gpc_tpc_nr(struct gf100_gr *gr) in gk104_grctx_generate_gpc_tpc_nr() argument
917 struct nvkm_device *device = gr->base.engine.subdev.device; in gk104_grctx_generate_gpc_tpc_nr()
[all …]
nv40.c
34 nv40_gr_units(struct nvkm_gr *gr) in nv40_gr_units() argument
36 return nvkm_rd32(gr->engine.subdev.device, 0x1540); in nv40_gr_units()
78 struct nv40_gr *gr = chan->gr; in nv40_gr_chan_bind() local
79 int ret = nvkm_gpuobj_new(gr->base.engine.subdev.device, gr->size, in nv40_gr_chan_bind()
84 nv40_grctx_fill(gr->base.engine.subdev.device, *pgpuobj); in nv40_gr_chan_bind()
95 struct nv40_gr *gr = chan->gr; in nv40_gr_chan_fini() local
96 struct nvkm_subdev *subdev = &gr->base.engine.subdev; in nv40_gr_chan_fini()
134 spin_lock_irqsave(&chan->gr->base.engine.lock, flags); in nv40_gr_chan_dtor()
136 spin_unlock_irqrestore(&chan->gr->base.engine.lock, flags); in nv40_gr_chan_dtor()
151 struct nv40_gr *gr = nv40_gr(base); in nv40_gr_chan_new() local
[all …]
nv20.c
20 struct nv20_gr *gr = chan->gr; in nv20_gr_chan_init() local
23 nvkm_kmap(gr->ctxtab); in nv20_gr_chan_init()
24 nvkm_wo32(gr->ctxtab, chan->chid * 4, inst >> 4); in nv20_gr_chan_init()
25 nvkm_done(gr->ctxtab); in nv20_gr_chan_init()
33 struct nv20_gr *gr = chan->gr; in nv20_gr_chan_fini() local
34 struct nvkm_device *device = gr->base.engine.subdev.device; in nv20_gr_chan_fini()
53 nvkm_kmap(gr->ctxtab); in nv20_gr_chan_fini()
54 nvkm_wo32(gr->ctxtab, chan->chid * 4, 0x00000000); in nv20_gr_chan_fini()
55 nvkm_done(gr->ctxtab); in nv20_gr_chan_fini()
78 struct nv20_gr *gr = nv20_gr(base); in nv20_gr_chan_new() local
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/r535/
gr.c
43 struct r535_gr *gr = grc->gr; in r535_gr_chan_dtor() local
45 for (int i = 0; i < gr->ctxbuf_nr; i++) { in r535_gr_chan_dtor()
60 r535_gr_promote_ctx(struct r535_gr *gr, bool golden, struct nvkm_vmm *vmm, in r535_gr_promote_ctx() argument
64 struct nvkm_subdev *subdev = &gr->base.engine.subdev; in r535_gr_promote_ctx()
77 for (int i = 0; i < gr->ctxbuf_nr; i++) { in r535_gr_promote_ctx()
80 const bool alloc = golden || !gr->ctxbuf[i].global; in r535_gr_promote_ctx()
83 entry->bufferId = gr->ctxbuf[i].bufferId; in r535_gr_promote_ctx()
84 entry->bInitialize = gr->ctxbuf[i].init && alloc; in r535_gr_promote_ctx()
87 ret = nvkm_memory_new(device, gr->ctxbuf[i].init ? in r535_gr_promote_ctx()
89 gr->ctxbuf[i].size, 1 << gr->ctxbuf[i].page, in r535_gr_promote_ctx()
[all …]
/linux/drivers/gpu/drm/nouveau/nvkm/subdev/gsp/rm/
gr.c
23 struct r535_gr *gr = container_of(base, typeof(*gr), base); in nvkm_rm_gr_fini() local
25 if (rm->api->gr->scrubber.fini) in nvkm_rm_gr_fini()
26 rm->api->gr->scrubber.fini(gr); in nvkm_rm_gr_fini()
35 struct r535_gr *gr = container_of(base, typeof(*gr), base); in nvkm_rm_gr_init() local
38 if (rm->api->gr->scrubber.init) { in nvkm_rm_gr_init()
39 ret = rm->api->gr->scrubber.init(gr); in nvkm_rm_gr_init()
51 rm->gpu->gr.class.i2m, in nvkm_rm_gr_new()
52 rm->gpu->gr.class.twod, in nvkm_rm_gr_new()
53 rm->gpu->gr.class.threed, in nvkm_rm_gr_new()
54 rm->gpu->gr.class.compute, in nvkm_rm_gr_new()
[all …]
/linux/arch/parisc/kernel/
ptrace.c
165 task_regs(child)->gr[0] &= ~USER_PSW_BITS; in arch_ptrace()
166 task_regs(child)->gr[0] |= data; in arch_ptrace()
335 regs->gr[28] = -ENOSYS; in do_syscall_trace_enter()
348 regs->gr[20] = -1UL; in do_syscall_trace_enter()
359 trace_sys_enter(regs, regs->gr[20]); in do_syscall_trace_enter()
364 audit_syscall_entry(regs->gr[20], regs->gr[26], regs->gr[25], in do_syscall_trace_enter()
365 regs->gr[24], regs->gr[23]); in do_syscall_trace_enter()
368 audit_syscall_entry(regs->gr[20] & 0xffffffff, in do_syscall_trace_enter()
369 regs->gr[26] & 0xffffffff, in do_syscall_trace_enter()
370 regs->gr[25] & 0xffffffff, in do_syscall_trace_enter()
[all …]
asm-offsets.c
51 DEFINE(TASK_PT_PSW, offsetof(struct task_struct, thread.regs.gr[ 0])); in main()
52 DEFINE(TASK_PT_GR1, offsetof(struct task_struct, thread.regs.gr[ 1])); in main()
53 DEFINE(TASK_PT_GR2, offsetof(struct task_struct, thread.regs.gr[ 2])); in main()
54 DEFINE(TASK_PT_GR3, offsetof(struct task_struct, thread.regs.gr[ 3])); in main()
55 DEFINE(TASK_PT_GR4, offsetof(struct task_struct, thread.regs.gr[ 4])); in main()
56 DEFINE(TASK_PT_GR5, offsetof(struct task_struct, thread.regs.gr[ 5])); in main()
57 DEFINE(TASK_PT_GR6, offsetof(struct task_struct, thread.regs.gr[ 6])); in main()
58 DEFINE(TASK_PT_GR7, offsetof(struct task_struct, thread.regs.gr[ 7])); in main()
59 DEFINE(TASK_PT_GR8, offsetof(struct task_struct, thread.regs.gr[ 8])); in main()
60 DEFINE(TASK_PT_GR9, offsetof(struct task_struct, thread.regs.gr[ 9])); in main()
[all …]
signal.c
65 err |= __copy_from_user(regs->gr, sc->sc_gr, sizeof(regs->gr)); in restore_sigcontext()
72 DBG(2, "%s: r28 is %ld\n", __func__, regs->gr[28]); in restore_sigcontext()
81 unsigned long usp = (regs->gr[30] & ~(0x01UL)); in sys_rt_sigreturn()
146 regs->gr[31] = regs->iaoq[0]; in sys_rt_sigreturn()
190 err |= __put_user(regs->gr[31], &sc->sc_iaoq[0]); in setup_sigcontext()
191 err |= __put_user(regs->gr[31]+4, &sc->sc_iaoq[1]); in setup_sigcontext()
195 __func__, regs->gr[31], regs->gr[31]+4); in setup_sigcontext()
204 err |= __copy_to_user(sc->sc_gr, regs->gr, sizeof(regs->gr)); in setup_sigcontext()
207 DBG(1, "%s: r28 is %ld\n", __func__, regs->gr[28]); in setup_sigcontext()
225 usp = (regs->gr[30] & ~(0x01UL)); in setup_rt_frame()
[all …]
/linux/arch/parisc/include/asm/
syscall.h
17 return regs->gr[20]; in syscall_get_nr()
24 regs->gr[20] = nr; in syscall_set_nr()
31 args[5] = regs->gr[21]; in syscall_get_arguments()
32 args[4] = regs->gr[22]; in syscall_get_arguments()
33 args[3] = regs->gr[23]; in syscall_get_arguments()
34 args[2] = regs->gr[24]; in syscall_get_arguments()
35 args[1] = regs->gr[25]; in syscall_get_arguments()
36 args[0] = regs->gr[26]; in syscall_get_arguments()
43 regs->gr[21] = args[5]; in syscall_set_arguments()
44 regs->gr[22] = args[4]; in syscall_set_arguments()
[all …]
