Lines matching "flags" and "mask" in arch/s390/kernel/fpu.c (in-kernel vector facility support; non-matching lines elided as "...")

// SPDX-License-Identifier: GPL-2.0
/*
 * In-kernel vector facility support functions
 * ...
 */
void __kernel_fpu_begin(struct kernel_fpu *state, int flags)
{
	__vector128 *vxrs = state->vxrs;
	int mask;

	/* Limit the save to the registers the previous context is using. */
	flags &= state->hdr.mask;
	if (flags & KERNEL_FPC)
		fpu_stfpc(&state->hdr.fpc);
	...
	if (flags & KERNEL_VXR_LOW)
		...
	/* Fast paths: a single contiguous range covers the whole request. */
	mask = flags & KERNEL_VXR;
	if (mask == KERNEL_VXR) {
		...
	}
	if (mask == KERNEL_VXR_MID) {
		...
	}
	/* Otherwise handle the low and high halves piecewise. */
	mask = flags & KERNEL_VXR_LOW;
	if (mask) {
		if (mask == KERNEL_VXR_LOW)
			...
		else if (mask == KERNEL_VXR_V0V7)
			...
	}
	mask = flags & KERNEL_VXR_HIGH;
	if (mask) {
		if (mask == KERNEL_VXR_HIGH)
			...
		else if (mask == KERNEL_VXR_V16V23)
			...
	}
}
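Kernel code brackets any use of vector registers with kernel_fpu_begin()/kernel_fpu_end(), passing the same flags to both calls so only the banks it actually touches are saved and restored. A minimal sketch, assuming the on-stack declaration macro and wrapper functions from the s390 FPU headers; vx_do_work() is a hypothetical vector routine, not kernel API:

	/* Sketch: KERNEL_VXR_LOW covers V0..V15, so a 16-register save area suffices. */
	DECLARE_KERNEL_FPU_ONSTACK16(vxstate);

	kernel_fpu_begin(&vxstate, KERNEL_FPC | KERNEL_VXR_LOW);
	vx_do_work();	/* hypothetical: may clobber the FPC and V0..V15 */
	kernel_fpu_end(&vxstate, KERNEL_FPC | KERNEL_VXR_LOW);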
void __kernel_fpu_end(struct kernel_fpu *state, int flags)
{
	__vector128 *vxrs = state->vxrs;
	int mask;

	/* Restore only the previous context's registers that were clobbered. */
	flags &= state->hdr.mask;
	if (flags & KERNEL_FPC)
		fpu_lfpc(&state->hdr.fpc);
	...
	if (flags & KERNEL_VXR_LOW)
		...
	mask = flags & KERNEL_VXR;
	if (mask == KERNEL_VXR) {
		...
	}
	if (mask == KERNEL_VXR_MID) {
		...
	}
	mask = flags & KERNEL_VXR_LOW;
	if (mask) {
		if (mask == KERNEL_VXR_LOW)
			...
		else if (mask == KERNEL_VXR_V0V7)
			...
	}
	mask = flags & KERNEL_VXR_HIGH;
	if (mask) {
		if (mask == KERNEL_VXR_HIGH)
			...
		else if (mask == KERNEL_VXR_V16V23)
			...
	}
}
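The mask comparisons only make sense against the flag layout: each KERNEL_VXR_* bit names one bank of eight vector registers, and the compound masks are unions of adjacent banks. A sketch consistent with the comparisons in these functions (the authoritative definitions live in the s390 FPU headers):

	#define KERNEL_FPC		1	/* floating-point control register */
	#define KERNEL_VXR_V0V7		2	/* V0..V7   */
	#define KERNEL_VXR_V8V15	4	/* V8..V15  */
	#define KERNEL_VXR_V16V23	8	/* V16..V23 */
	#define KERNEL_VXR_V24V31	16	/* V24..V31 */

	#define KERNEL_VXR_LOW	(KERNEL_VXR_V0V7   | KERNEL_VXR_V8V15)
	#define KERNEL_VXR_MID	(KERNEL_VXR_V8V15  | KERNEL_VXR_V16V23)
	#define KERNEL_VXR_HIGH	(KERNEL_VXR_V16V23 | KERNEL_VXR_V24V31)
	#define KERNEL_VXR	(KERNEL_VXR_LOW | KERNEL_VXR_HIGH)

With this layout, mask == KERNEL_VXR means all four banks are requested and two full-range loads/stores suffice, while KERNEL_VXR_MID (V8..V23) is a single contiguous range straddling the low/high split.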
void load_fpu_state(struct fpu *state, int flags)
{
	__vector128 *vxrs = &state->vxrs[0];
	int mask;

	/* Unlike __kernel_fpu_begin/end(), no usage mask narrows flags here. */
	if (flags & KERNEL_FPC)
		fpu_lfpc_safe(&state->fpc);
	...
	if (flags & KERNEL_VXR_V0V7)
		load_fp_regs_vx(state->vxrs);
	...
	mask = flags & KERNEL_VXR;
	if (mask == KERNEL_VXR) {
		...
	}
	if (mask == KERNEL_VXR_MID) {
		...
	}
	mask = flags & KERNEL_VXR_LOW;
	if (mask) {
		if (mask == KERNEL_VXR_LOW)
			...
		else if (mask == KERNEL_VXR_V0V7)
			...
	}
	mask = flags & KERNEL_VXR_HIGH;
	if (mask) {
		if (mask == KERNEL_VXR_HIGH)
			...
		else if (mask == KERNEL_VXR_V16V23)
			...
	}
}
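The same pick-the-cheapest-instruction-sequence dispatch repeats in all four functions: try the two contiguous fast paths first, then fall back to piecewise low and high halves. As a self-contained illustration (plain C, not kernel API; all names are hypothetical), this model prints which register ranges a given flags value resolves to:

	#include <stdio.h>

	/* Illustrative copies of the mask values sketched above. */
	enum {
		VXR_V0V7 = 2, VXR_V8V15 = 4, VXR_V16V23 = 8, VXR_V24V31 = 16,
		VXR_LOW  = VXR_V0V7   | VXR_V8V15,
		VXR_MID  = VXR_V8V15  | VXR_V16V23,
		VXR_HIGH = VXR_V16V23 | VXR_V24V31,
		VXR_ALL  = VXR_LOW | VXR_HIGH,
	};

	/* Mirror the dispatch order: full, mid, then low/high piecewise. */
	static void dispatch(int flags)
	{
		int mask = flags & VXR_ALL;

		if (mask == VXR_ALL) {
			printf("vlm/vstm 0-15 and 16-31\n");
			return;
		}
		if (mask == VXR_MID) {
			printf("vlm/vstm 8-23\n");
			return;
		}
		mask = flags & VXR_LOW;
		if (mask == VXR_LOW)
			printf("vlm/vstm 0-15\n");
		else if (mask == VXR_V0V7)
			printf("vlm/vstm 0-7\n");
		else if (mask)
			printf("vlm/vstm 8-15\n");
		mask = flags & VXR_HIGH;
		if (mask == VXR_HIGH)
			printf("vlm/vstm 16-31\n");
		else if (mask == VXR_V16V23)
			printf("vlm/vstm 16-23\n");
		else if (mask)
			printf("vlm/vstm 24-31\n");
	}

	int main(void)
	{
		dispatch(VXR_ALL);			/* two full-range ops */
		dispatch(VXR_MID);			/* one 8..23 op */
		dispatch(VXR_V0V7 | VXR_V24V31);	/* two partial ops */
		return 0;
	}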
void save_fpu_state(struct fpu *state, int flags)
{
	__vector128 *vxrs = &state->vxrs[0];
	int mask;

	if (flags & KERNEL_FPC)
		fpu_stfpc(&state->fpc);
	...
	if (flags & KERNEL_VXR_LOW)
		save_fp_regs_vx(state->vxrs);
	...
	mask = flags & KERNEL_VXR;
	if (mask == KERNEL_VXR) {
		...
	}
	if (mask == KERNEL_VXR_MID) {
		...
	}
	mask = flags & KERNEL_VXR_LOW;
	if (mask) {
		if (mask == KERNEL_VXR_LOW)
			...
		else if (mask == KERNEL_VXR_V0V7)
			...
	}
	mask = flags & KERNEL_VXR_HIGH;
	if (mask) {
		if (mask == KERNEL_VXR_HIGH)
			...
		else if (mask == KERNEL_VXR_V16V23)
			...
	}
}
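save_fpu_state() and load_fpu_state() are the unconditional counterparts used when an entire user register set has to move, for example around a context boundary. A hedged sketch of such a pairing; the prev_fpu/next_fpu pointers are illustrative, not the kernel's exact call sites:

	/* Illustrative: prev_fpu and next_fpu point at two tasks' struct fpu. */
	save_fpu_state(prev_fpu, KERNEL_FPC | KERNEL_VXR);
	load_fpu_state(next_fpu, KERNEL_FPC | KERNEL_VXR);

Passing KERNEL_FPC | KERNEL_VXR selects the control register plus all four vector banks, so both calls take the two-full-range fast path shown above.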