Searched refs:vxrs (Results 1 – 9 of 9) sorted by relevance
/linux/arch/s390/kernel/
  fpu.c
     15  __vector128 *vxrs = state->vxrs;  in __kernel_fpu_begin() local
     27  save_fp_regs_vx(vxrs);  in __kernel_fpu_begin()
     32  vxrs += fpu_vstm(0, 15, vxrs);  in __kernel_fpu_begin()
     33  vxrs += fpu_vstm(16, 31, vxrs);  in __kernel_fpu_begin()
     37  vxrs += fpu_vstm(8, 23, vxrs);  in __kernel_fpu_begin()
     43  vxrs += fpu_vstm(0, 15, vxrs);  in __kernel_fpu_begin()
     45  vxrs += fpu_vstm(0, 7, vxrs);  in __kernel_fpu_begin()
     47  vxrs += fpu_vstm(8, 15, vxrs);  in __kernel_fpu_begin()
     52  vxrs += fpu_vstm(16, 31, vxrs);  in __kernel_fpu_begin()
     54  vxrs += fpu_vstm(16, 23, vxrs);  in __kernel_fpu_begin()
    [all …]
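
The __kernel_fpu_begin() hits above show vector state being saved bank by bank: each fpu_vstm(from, to, ...) call stores one contiguous range of vector registers, and the destination pointer advances by however many registers were written, so only the banks the caller actually needs end up in the buffer. Below is a minimal user-space sketch of that save pattern; the DEMO_* flags, vreg_t type and demo_vstm() helper are hypothetical stand-ins, not the kernel's API.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef struct { uint64_t high, low; } vreg_t;   /* stand-in for __vector128 */

#define DEMO_VXR_V0V7    0x01u
#define DEMO_VXR_V8V15   0x02u
#define DEMO_VXR_V16V23  0x04u
#define DEMO_VXR_V24V31  0x08u
#define DEMO_VXR_LOW     (DEMO_VXR_V0V7   | DEMO_VXR_V8V15)
#define DEMO_VXR_HIGH    (DEMO_VXR_V16V23 | DEMO_VXR_V24V31)

static vreg_t cpu_vrs[32];                       /* pretend CPU register file */

/* Stand-in for fpu_vstm(from, to, dst): store registers from..to, return count. */
static int demo_vstm(int from, int to, vreg_t *dst)
{
	memcpy(dst, &cpu_vrs[from], (size_t)(to - from + 1) * sizeof(*dst));
	return to - from + 1;
}

/* Save only the banks named in flags, packing them back to back in vxrs[]. */
static int demo_fpu_begin(unsigned int flags, vreg_t *vxrs)
{
	vreg_t *p = vxrs;

	if ((flags & DEMO_VXR_LOW) == DEMO_VXR_LOW)
		p += demo_vstm(0, 15, p);
	else if (flags & DEMO_VXR_V0V7)
		p += demo_vstm(0, 7, p);
	else if (flags & DEMO_VXR_V8V15)
		p += demo_vstm(8, 15, p);

	if ((flags & DEMO_VXR_HIGH) == DEMO_VXR_HIGH)
		p += demo_vstm(16, 31, p);
	else if (flags & DEMO_VXR_V16V23)
		p += demo_vstm(16, 23, p);
	else if (flags & DEMO_VXR_V24V31)
		p += demo_vstm(24, 31, p);

	return (int)(p - vxrs);                  /* number of registers saved */
}

int main(void)
{
	vreg_t buf[32];
	int n = demo_fpu_begin(DEMO_VXR_LOW | DEMO_VXR_V16V23, buf);

	printf("saved %d vector registers\n", n);
	return 0;
}

Returning the count from each store keeps the buffer densely packed even when only some banks are saved, which is what the pointer arithmetic in the hits above is doing.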
  compat_signal.c
    126  __u64 vxrs[__NUM_VXRS_LOW];  in save_sigregs_ext32() local
    139  vxrs[i] = current->thread.ufpu.vxrs[i].low;  in save_sigregs_ext32()
    140  if (__copy_to_user(&sregs_ext->vxrs_low, vxrs,  in save_sigregs_ext32()
    143  current->thread.ufpu.vxrs + __NUM_VXRS_LOW,  in save_sigregs_ext32()
    154  __u64 vxrs[__NUM_VXRS_LOW];  in restore_sigregs_ext32() local
    166  if (__copy_from_user(vxrs, &sregs_ext->vxrs_low,  in restore_sigregs_ext32()
    168  __copy_from_user(current->thread.ufpu.vxrs + __NUM_VXRS_LOW,  in restore_sigregs_ext32()
    173  current->thread.ufpu.vxrs[i].low = vxrs[i];  in restore_sigregs_ext32()
  signal.c
    178  __u64 vxrs[__NUM_VXRS_LOW];  in save_sigregs_ext() local
    184  vxrs[i] = current->thread.ufpu.vxrs[i].low;  in save_sigregs_ext()
    185  if (__copy_to_user(&sregs_ext->vxrs_low, vxrs,  in save_sigregs_ext()
    188  current->thread.ufpu.vxrs + __NUM_VXRS_LOW,  in save_sigregs_ext()
    198  __u64 vxrs[__NUM_VXRS_LOW];  in restore_sigregs_ext() local
    203  if (__copy_from_user(vxrs, &sregs_ext->vxrs_low,  in restore_sigregs_ext()
    205  __copy_from_user(current->thread.ufpu.vxrs + __NUM_VXRS_LOW,  in restore_sigregs_ext()
    210  current->thread.ufpu.vxrs[i].low = vxrs[i];  in restore_sigregs_ext()
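
Both save_sigregs_ext()/restore_sigregs_ext() here and their 32-bit counterparts in compat_signal.c above copy only the low 64-bit halves of vector registers 0..__NUM_VXRS_LOW-1 into the signal frame and copy V16..V31 in full; the high halves of V0..V15 are the classic floating-point registers and are saved separately. A rough sketch of that split follows, using an illustrative frame layout rather than the kernel's uapi structures.

#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define NUM_VXRS      32
#define NUM_VXRS_LOW  16                        /* stand-in for __NUM_VXRS_LOW */

typedef struct { uint64_t high, low; } vreg_t;  /* stand-in for __vector128 */

/* Illustrative extended signal frame: low halves of V0..V15, full V16..V31. */
struct demo_sigregs_ext {
	uint64_t vxrs_low[NUM_VXRS_LOW];
	vreg_t   vxrs_high[NUM_VXRS - NUM_VXRS_LOW];
};

static void demo_save_ext(const vreg_t task_vxrs[NUM_VXRS], struct demo_sigregs_ext *f)
{
	for (int i = 0; i < NUM_VXRS_LOW; i++)
		f->vxrs_low[i] = task_vxrs[i].low;            /* only the low halves */
	memcpy(f->vxrs_high, task_vxrs + NUM_VXRS_LOW, sizeof(f->vxrs_high));
}

static void demo_restore_ext(vreg_t task_vxrs[NUM_VXRS], const struct demo_sigregs_ext *f)
{
	memcpy(task_vxrs + NUM_VXRS_LOW, f->vxrs_high, sizeof(f->vxrs_high));
	for (int i = 0; i < NUM_VXRS_LOW; i++)
		task_vxrs[i].low = f->vxrs_low[i];            /* high halves untouched */
}

int main(void)
{
	vreg_t vxrs[NUM_VXRS] = { { .high = 1, .low = 2 } };
	struct demo_sigregs_ext frame;

	demo_save_ext(vxrs, &frame);
	demo_restore_ext(vxrs, &frame);
	printf("vxrs[0] = { %llu, %llu }\n",
	       (unsigned long long)vxrs[0].high, (unsigned long long)vxrs[0].low);
	return 0;
}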
  ptrace.c
    258  tmp = *(addr_t *)((addr_t)child->thread.ufpu.vxrs + 2 * offset);  in __peek_user()
    399  *(addr_t *)((addr_t)child->thread.ufpu.vxrs + 2 * offset) = data;  in __poke_user()
    619  tmp = *(__u32 *)((addr_t)child->thread.ufpu.vxrs + 2 * offset);  in __peek_user_compat()
    738  *(__u32 *)((addr_t)child->thread.ufpu.vxrs + 2 * offset) = tmp;  in __poke_user_compat()
    887  convert_vx_to_fp(fprs, target->thread.ufpu.vxrs);  in s390_fpregs_set()
    904  convert_fp_to_vx(target->thread.ufpu.vxrs, fprs);  in s390_fpregs_set()
    948  __u64 vxrs[__NUM_VXRS_LOW];  in s390_vxrs_low_get() local
    956  vxrs[i] = target->thread.ufpu.vxrs[i].low;  in s390_vxrs_low_get()
    957  return membuf_write(&to, vxrs, sizeof(vxrs));  in s390_vxrs_low_get()
    965  __u64 vxrs[__NUM_VXRS_LOW];  in s390_vxrs_low_set() local
    [all …]
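
The s390_fpregs_set() hits convert between two views of the same state with convert_vx_to_fp() and convert_fp_to_vx(): floating-point register i is the high 64-bit half of vector register i. The doubled offset in the __peek_user()/__poke_user() hits is consistent with that layout, since each 8-byte FP slot is backed by a 16-byte vector register. A small illustrative version of the conversion, with stand-in types:

#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t high, low; } vreg_t;  /* stand-in for __vector128 */
typedef uint64_t freg_t;                        /* stand-in for freg_t      */

/* FPR i is the high half of VR i, so the FP view is that half copied out. */
static void demo_convert_vx_to_fp(freg_t fprs[16], const vreg_t vxrs[16])
{
	for (int i = 0; i < 16; i++)
		fprs[i] = vxrs[i].high;
}

/* Writing the FP view back touches only the high halves; low halves survive. */
static void demo_convert_fp_to_vx(vreg_t vxrs[16], const freg_t fprs[16])
{
	for (int i = 0; i < 16; i++)
		vxrs[i].high = fprs[i];
}

int main(void)
{
	vreg_t vxrs[16] = { { .high = 0x1111, .low = 0x2222 } };
	freg_t fprs[16];

	demo_convert_vx_to_fp(fprs, vxrs);
	fprs[0] = 0x3333;
	demo_convert_fp_to_vx(vxrs, fprs);
	printf("vxrs[0] = { %#llx, %#llx }\n",
	       (unsigned long long)vxrs[0].high, (unsigned long long)vxrs[0].low);
	return 0;
}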
  perf_regs.c
     22  fp = *(freg_t *)(current->thread.ufpu.vxrs + idx);  in perf_reg_value()

/linux/arch/s390/include/asm/
  fpu.h
     87  static __always_inline void save_vx_regs(__vector128 *vxrs)  in save_vx_regs() argument
     89  fpu_vstm(0, 15, &vxrs[0]);  in save_vx_regs()
     90  fpu_vstm(16, 31, &vxrs[16]);  in save_vx_regs()
     93  static __always_inline void load_vx_regs(__vector128 *vxrs)  in load_vx_regs() argument
     95  fpu_vlm(0, 15, &vxrs[0]);  in load_vx_regs()
     96  fpu_vlm(16, 31, &vxrs[16]);  in load_vx_regs()
    149  static __always_inline void save_fp_regs_vx(__vector128 *vxrs)  in save_fp_regs_vx() argument
    151  freg_t *fprs = (freg_t *)&vxrs[0].high;  in save_fp_regs_vx()
    156  static __always_inline void load_fp_regs_vx(__vector128 *vxrs)  in load_fp_regs_vx() argument
    158  freg_t *fprs = (freg_t *)&vxrs[0].high;  in load_fp_regs_vx()
    [all …]
  fpu-types.h
     16  __vector128 vxrs[__NUM_VXRS] __aligned(8);  member
     26  __vector128 vxrs[] __aligned(8);  member
     32  __vector128 vxrs[vxr_size] __aligned(8); \
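
The three fpu-types.h hits show the same register array declared three ways: a fixed vxrs[__NUM_VXRS] member, a flexible vxrs[] tail, and a vxrs[vxr_size] member that, judging by the trailing backslash, is emitted by a declare-on-stack macro so a caller reserves space only for the registers it intends to save. A compilable sketch of that flexible-array-plus-macro pattern, with hypothetical demo_* names standing in for the kernel's types:

#include <stdint.h>
#include <stdio.h>

typedef struct { uint64_t high, low; } demo_vector128;  /* stand-in for __vector128 */

/* Header shared by every on-stack variant (field name is illustrative). */
struct demo_kernel_fpu_hdr {
	uint32_t mask;
};

/* Generic view: header followed by a flexible, 8-byte aligned register array. */
struct demo_kernel_fpu {
	struct demo_kernel_fpu_hdr hdr;
	demo_vector128 vxrs[] __attribute__((aligned(8)));
};

/* Sized on-stack variant: same layout, but with room for vxr_size registers. */
#define DECLARE_DEMO_KERNEL_FPU_ONSTACK(vxr_size, name)                       \
	struct {                                                              \
		struct demo_kernel_fpu_hdr hdr;                               \
		demo_vector128 vxrs[vxr_size] __attribute__((aligned(8)));    \
	} name

int main(void)
{
	DECLARE_DEMO_KERNEL_FPU_ONSTACK(16, state);     /* space for V0..V15 only */
	struct demo_kernel_fpu *fpu = (struct demo_kernel_fpu *)&state;

	state.hdr.mask = 0;
	fpu->vxrs[0].high = 1;                          /* access via the generic view */
	printf("header %zu bytes, on-stack state %zu bytes\n",
	       sizeof(struct demo_kernel_fpu), sizeof(state));
	return 0;
}

Casting the sized on-stack object to the generic struct works here because both place the header and the 8-byte aligned register array at the same offsets; functions can then take the generic type regardless of how much register space the caller reserved.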
  fpu-insn.h
    291  : [vxrs] "R" (*_v), \
    309  : [vxrs] "Q" (*_v), \
    436  : [vxrs] "=R" (*_v) \
    453  : [vxrs] "=Q" (*_v) \
  ipl.h
     62  void __init save_area_add_vxrs(struct save_area *, __vector128 *vxrs);