Lines matching "sf" in arch/arm64/net/bpf_jit.h, the arm64 BPF JIT's instruction-encoding macros. The sf argument selects the 64-bit (1) or 32-bit (0) form of each instruction.

#define A64_VARIANT(sf) \
	((sf) ? AARCH64_INSN_VARIANT_64BIT : AARCH64_INSN_VARIANT_32BIT)
/* Compare & branch (immediate) */
#define A64_COMP_BRANCH(sf, Rt, offset, type) \
	aarch64_insn_gen_comp_branch_imm(0, offset, Rt, A64_VARIANT(sf), \
		AARCH64_INSN_BRANCH_COMP_##type)
#define A64_CBZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, ZERO)
#define A64_CBNZ(sf, Rt, imm19) A64_COMP_BRANCH(sf, Rt, (imm19) << 2, NONZERO)
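
/*
 * Usage sketch, not part of this header: the JIT compiler passes each
 * helper's return value (a u32 encoding) to an emit() routine that
 * appends it to the program image. emit(), ctx, is64, dst and
 * jmp_offset below are assumed names from the compiler side; imm19
 * offsets count instructions, hence the << 2 above.
 */
	emit(A64_CBNZ(is64, dst, jmp_offset), ctx);	/* branch if dst != 0 */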

/* Load/store exclusive */
#define A64_SIZE(sf) \
	((sf) ? AARCH64_INSN_SIZE_64 : AARCH64_INSN_SIZE_32)
#define A64_LSX(sf, Rt, Rn, Rs, type) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_##type)
/* Rt = [Rn]; (atomic) */
#define A64_LDXR(sf, Rt, Rn) \
	A64_LSX(sf, Rt, Rn, A64_ZR, LOAD_EX)
/* [Rn] = Rt; (atomic) Rs = [state] */
#define A64_STXR(sf, Rt, Rn, Rs) \
	A64_LSX(sf, Rt, Rn, Rs, STORE_EX)
/* [Rn] = Rt (store release); (atomic) Rs = [state] */
#define A64_STLXR(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_load_store_ex(Rt, Rn, Rs, A64_SIZE(sf), \
				       AARCH64_INSN_LDST_STORE_REL_EX)
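
/*
 * Sketch of an exclusive-access retry loop for an atomic add, the kind
 * of sequence a JIT emits when LSE atomics are unavailable (a simplified
 * assumption, not the exact emitted code; tmp and tmp2 are scratch
 * registers, the CBNZ offset symbolic). The STXR status register is
 * always 32-bit, hence sf = 0 on the branch:
 */
	emit(A64_LDXR(is64, tmp, reg), ctx);		/* tmp = [reg], exclusive */
	emit(A64_ADD(is64, tmp, tmp, src), ctx);	/* tmp += src */
	emit(A64_STXR(is64, tmp, reg, tmp2), ctx);	/* [reg] = tmp, tmp2 = status */
	emit(A64_CBNZ(0, tmp2, jmp_offset), ctx);	/* retry if the store failed */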

/* LSE atomics */
/* ST{ADD,CLR,EOR,SET} is encoded as LD{ADD,CLR,EOR,SET} with XZR as Rt */
#define A64_ST_OP(sf, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(A64_ZR, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_NONE)
/* [Rn] <op>= Rs */
#define A64_STADD(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, ADD)
#define A64_STCLR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, CLR)
#define A64_STEOR(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, EOR)
#define A64_STSET(sf, Rn, Rs) A64_ST_OP(sf, Rn, Rs, SET)
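
/*
 * Example (assumption): with LSE present, a BPF atomic add whose old
 * value is not needed can be a single store-side atomic instead of an
 * LDXR/STXR loop:
 */
	emit(A64_STADD(is64, reg, src), ctx);		/* [reg] += src, atomically */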

#define A64_LD_OP_AL(sf, Rt, Rn, Rs, op) \
	aarch64_insn_gen_atomic_ld_op(Rt, Rn, Rs, \
		A64_SIZE(sf), AARCH64_INSN_MEM_ATOMIC_##op, \
		AARCH64_INSN_MEM_ORDER_ACQREL)
/* Rt = [Rn] (load acquire); [Rn] <op>= Rs (store release) */
#define A64_LDADDAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, ADD)
#define A64_LDCLRAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, CLR)
#define A64_LDEORAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, EOR)
#define A64_LDSETAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SET)
/* Rt = [Rn] (load acquire); [Rn] = Rs (store release) */
#define A64_SWPAL(sf, Rt, Rn, Rs) A64_LD_OP_AL(sf, Rt, Rn, Rs, SWP)
/* Rs = CAS(Rn, Rs, Rt) (load acquire & store release) */
#define A64_CASAL(sf, Rt, Rn, Rs) \
	aarch64_insn_gen_cas(Rt, Rn, Rs, A64_SIZE(sf), \
			     AARCH64_INSN_MEM_ORDER_ACQREL)
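
/*
 * Example (assumption): an atomic exchange maps onto SWPAL, which loads
 * the old value and stores the new one with acquire/release ordering;
 * here src is overwritten with the previous memory contents:
 */
	emit(A64_SWPAL(is64, src, reg, src), ctx);	/* src = xchg([reg], src) */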

/* Add/subtract (immediate) */
#define A64_ADDSUB_IMM(sf, Rd, Rn, imm12, type) \
	aarch64_insn_gen_add_sub_imm(Rd, Rn, imm12, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP imm12 */
#define A64_ADD_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD)
#define A64_SUB_I(sf, Rd, Rn, imm12) A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB)
#define A64_ADDS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, ADD_SETFLAGS)
#define A64_SUBS_I(sf, Rd, Rn, imm12) \
	A64_ADDSUB_IMM(sf, Rd, Rn, imm12, SUB_SETFLAGS)
/* Rn + imm12; set condition flags */
#define A64_CMN_I(sf, Rn, imm12) A64_ADDS_I(sf, A64_ZR, Rn, imm12)
/* Rn - imm12; set condition flags */
#define A64_CMP_I(sf, Rn, imm12) A64_SUBS_I(sf, A64_ZR, Rn, imm12)
/* Rd = Rn */
#define A64_MOV(sf, Rd, Rn) A64_ADD_I(sf, Rd, Rn, 0)
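
/*
 * Worked note: imm12 is the unsigned 12-bit add/sub immediate (0..4095),
 * so A64_MOV(1, Rd, Rn) assembles to "add xRd, xRn, #0", the MOV alias
 * that also works when one operand is the stack pointer.
 */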

/* Bitfield move */
#define A64_BITFIELD(sf, Rd, Rn, immr, imms, type) \
	aarch64_insn_gen_bitfield(Rd, Rn, immr, imms, \
		A64_VARIANT(sf), AARCH64_INSN_BITFIELD_MOVE_##type)
/* Signed, with sign replication to left and zeros to right */
#define A64_SBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, SIGNED)
/* Unsigned, with zeros to left and right */
#define A64_UBFM(sf, Rd, Rn, ir, is) A64_BITFIELD(sf, Rd, Rn, ir, is, UNSIGNED)

/* Rd = Rn << shift */
#define A64_LSL(sf, Rd, Rn, shift) ({ \
	int sz = (sf) ? 64 : 32; \
	A64_UBFM(sf, Rd, Rn, (unsigned)-(shift) % sz, sz - 1 - (shift)); \
})
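
/*
 * Worked example: A64_LSL(1, Rd, Rn, 3) yields immr = (unsigned)-3 % 64
 * = 61 and imms = 64 - 1 - 3 = 60, i.e. "ubfm xRd, xRn, #61, #60",
 * which is exactly the LSL #3 alias.
 */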
/* Rd = Rn >> shift */
#define A64_LSR(sf, Rd, Rn, shift) A64_UBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)
/* Rd = Rn >> shift; signed */
#define A64_ASR(sf, Rd, Rn, shift) A64_SBFM(sf, Rd, Rn, shift, (sf) ? 63 : 31)

/* Zero extend */
#define A64_UXTH(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 15)
#define A64_UXTW(sf, Rd, Rn) A64_UBFM(sf, Rd, Rn, 0, 31)

/* Sign extend */
#define A64_SXTB(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 7)
#define A64_SXTH(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 15)
#define A64_SXTW(sf, Rd, Rn) A64_SBFM(sf, Rd, Rn, 0, 31)
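
/*
 * Worked example: A64_UXTH(sf, Rd, Rn) is UBFM with immr = 0, imms = 15;
 * it copies bits 15:0 and zeroes everything above, while the SXT* forms
 * replicate bit 7/15/31 of Rn across the upper bits instead.
 */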

/* Move wide (immediate) */
#define A64_MOVEW(sf, Rd, imm16, shift, type) \
	aarch64_insn_gen_movewide(Rd, imm16, shift, \
		A64_VARIANT(sf), AARCH64_INSN_MOVEWIDE_##type)
/* Insert imm16 at bit position shift (one of 0, 16, 32, 48); MOVZ zeroes
 * the other bits first, MOVK keeps them, MOVN writes the inverse. */
#define A64_MOVN(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, INVERSE)
#define A64_MOVZ(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, ZERO)
#define A64_MOVK(sf, Rd, imm16, shift) A64_MOVEW(sf, Rd, imm16, shift, KEEP)
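
/*
 * Sketch (assumption, modeled on the JIT's 64-bit immediate helper): an
 * arbitrary u64 constant is materialized 16 bits at a time, MOVZ for the
 * first chunk and MOVK for the rest. Real code skips all-zero chunks.
 */
	emit(A64_MOVZ(1, reg, val & 0xffff, 0), ctx);
	emit(A64_MOVK(1, reg, (val >> 16) & 0xffff, 16), ctx);
	emit(A64_MOVK(1, reg, (val >> 32) & 0xffff, 32), ctx);
	emit(A64_MOVK(1, reg, (val >> 48) & 0xffff, 48), ctx);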

/* Add/subtract (shifted register) */
#define A64_ADDSUB_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_add_sub_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_ADSB_##type)
/* Rd = Rn OP Rm */
#define A64_ADD(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, ADD)
#define A64_SUB(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB)
#define A64_SUBS(sf, Rd, Rn, Rm) A64_ADDSUB_SREG(sf, Rd, Rn, Rm, SUB_SETFLAGS)
/* Rd = -Rm */
#define A64_NEG(sf, Rd, Rm) A64_SUB(sf, Rd, A64_ZR, Rm)
/* Rn - Rm; set condition flags */
#define A64_CMP(sf, Rn, Rm) A64_SUBS(sf, A64_ZR, Rn, Rm)

/* Data-processing (1 source) */
#define A64_DATA1(sf, Rd, Rn, type) aarch64_insn_gen_data1(Rd, Rn, \
	A64_VARIANT(sf), AARCH64_INSN_DATA1_##type)
/* Rd = BSWAPx(Rn) */
#define A64_REV16(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_16)
#define A64_REV32(sf, Rd, Rn) A64_DATA1(sf, Rd, Rn, REVERSE_32)

/* Data-processing (2 source) */
/* Rd = Rn OP Rm */
#define A64_DATA2(sf, Rd, Rn, Rm, type) aarch64_insn_gen_data2(Rd, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA2_##type)
#define A64_UDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, UDIV)
#define A64_SDIV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, SDIV)
#define A64_LSLV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSLV)
#define A64_LSRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, LSRV)
#define A64_ASRV(sf, Rd, Rn, Rm) A64_DATA2(sf, Rd, Rn, Rm, ASRV)

/* Data-processing (3 source) */
/* Rd = Ra + Rn * Rm */
#define A64_MADD(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MADD)
/* Rd = Ra - Rn * Rm */
#define A64_MSUB(sf, Rd, Ra, Rn, Rm) aarch64_insn_gen_data3(Rd, Ra, Rn, Rm, \
	A64_VARIANT(sf), AARCH64_INSN_DATA3_MSUB)
/* Rd = Rn * Rm */
#define A64_MUL(sf, Rd, Rn, Rm) A64_MADD(sf, Rd, A64_ZR, Rn, Rm)
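
/*
 * Sketch (assumption): AArch64 has no remainder instruction, so a
 * modulo is lowered as a divide followed by MSUB:
 */
	emit(A64_UDIV(is64, tmp, dst, src), ctx);	/* tmp = dst / src */
	emit(A64_MSUB(is64, dst, dst, tmp, src), ctx);	/* dst = dst - tmp * src */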

/* Logical (shifted register) */
#define A64_LOGIC_SREG(sf, Rd, Rn, Rm, type) \
	aarch64_insn_gen_logical_shifted_reg(Rd, Rn, Rm, 0, \
		A64_VARIANT(sf), AARCH64_INSN_LOGIC_##type)
/* Rd = Rn OP Rm */
#define A64_AND(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND)
#define A64_ORR(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, ORR)
#define A64_EOR(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, EOR)
#define A64_ANDS(sf, Rd, Rn, Rm) A64_LOGIC_SREG(sf, Rd, Rn, Rm, AND_SETFLAGS)
/* Rn & Rm; set condition flags */
#define A64_TST(sf, Rn, Rm) A64_ANDS(sf, A64_ZR, Rn, Rm)
/* Rd = ~Rm (alias of ORN with A64_ZR as Rn) */
#define A64_MVN(sf, Rd, Rm) \
	A64_LOGIC_SREG(sf, Rd, A64_ZR, Rm, ORN)

/* Logical (immediate) */
#define A64_LOGIC_IMM(sf, Rd, Rn, imm, type) ({ \
	u64 imm64 = (sf) ? (u64)imm : (u64)(u32)imm; \
	aarch64_insn_gen_logical_immediate(AARCH64_INSN_LOGIC_##type, \
		A64_VARIANT(sf), Rn, Rd, imm64); \
})
/* Rd = Rn OP imm */
#define A64_AND_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND)
#define A64_ORR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, ORR)
#define A64_EOR_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, EOR)
#define A64_ANDS_I(sf, Rd, Rn, imm) A64_LOGIC_IMM(sf, Rd, Rn, imm, AND_SETFLAGS)
/* Rn & imm; set condition flags */
#define A64_TST_I(sf, Rn, imm) A64_ANDS_I(sf, A64_ZR, Rn, imm)
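
/*
 * Worked note: logical-immediate instructions only encode "bitmask
 * immediates" (rotated runs of contiguous set bits). 0xffff qualifies,
 * so the line below is a single AND; for unencodable values the insn
 * generator is expected to return a break/fault encoding and the value
 * must be loaded into a register first (an assumption about the
 * aarch64_insn_gen_* error convention). The (u32) cast in A64_LOGIC_IMM
 * zero-extends 32-bit immediates so a negative int does not set upper
 * bits that the 32-bit encoder would reject.
 */
	emit(A64_AND_I(0, dst, dst, 0xffff), ctx);	/* dst &= 0xffff, 32-bit */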