| /freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/ |
| AArch64InstrAtomics.td |
     69:  def : Pat<(relaxed_load<atomic_load_az_8> (ro_Xindexed8 GPR64sp:$Rn, GPR64:$Rm,
     71:            (LDRBBroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend8:$offset)>;
     85:  def : Pat<(relaxed_load<atomic_load_az_16> (ro_Xindexed16 GPR64sp:$Rn, GPR64:$Rm,
     87:            (LDRHHroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend16:$extend)>;
    101:  def : Pat<(relaxed_load<atomic_load_32> (ro_Xindexed32 GPR64sp:$Rn, GPR64:$Rm,
    103:            (LDRWroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend32:$extend)>;
    117:  def : Pat<(relaxed_load<atomic_load_64> (ro_Xindexed64 GPR64sp:$Rn, GPR64:$Rm,
    119:            (LDRXroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend64:$extend)>;
    131:  def : Pat<(f32 (bitconvert (i32 (relaxed_load<atomic_load_32> (ro_Xindexed32 GPR64sp:$Rn, GPR64:$Rm,
    133:            (LDRSroX GPR64sp:$Rn, GPR64:$Rm, ro_Xextend32:$extend)>;
    [all …]
|
| AArch64InstrGISel.td |
    349:  def : Pat<(atomic_cmp_swap_i8 GPR64:$addr, GPR32:$desired, GPR32:$new),
    350:            (CMP_SWAP_8 GPR64:$addr, GPR32:$desired, GPR32:$new)>;
    352:  def : Pat<(atomic_cmp_swap_i16 GPR64:$addr, GPR32:$desired, GPR32:$new),
    353:            (CMP_SWAP_16 GPR64:$addr, GPR32:$desired, GPR32:$new)>;
    355:  def : Pat<(atomic_cmp_swap_i32 GPR64:$addr, GPR32:$desired, GPR32:$new),
    356:            (CMP_SWAP_32 GPR64:$addr, GPR32:$desired, GPR32:$new)>;
    358:  def : Pat<(atomic_cmp_swap_i64 GPR64:$addr, GPR64:$desired, GPR64:$new),
    359:            (CMP_SWAP_64 GPR64:$addr, GPR64:$desired, GPR64:$new)>;
    362:  def : Pat<(int_aarch64_stlxp GPR64:$lo, GPR64:$hi, GPR64:$addr),
    363:            (STLXPX GPR64:$lo, GPR64:$hi, GPR64:$addr)>;
    [all …]
|
| AArch64InstrInfo.td |
     620:  def top32Zero: PatLeaf<(i64 GPR64:$src), [{
     630:  def topbitsallzero64: PatLeaf<(i64 GPR64:$src), [{
    1044:  def PROBED_STACKALLOC : Pseudo<(outs GPR64:$scratch),
    1137:  def JumpTableDest32 : Pseudo<(outs GPR64:$dst, GPR64sp:$scratch),
    1138:                               (ins GPR64:$table, GPR64:$entry, i32imm:$jti), []>,
    1140:  def JumpTableDest16 : Pseudo<(outs GPR64:$dst, GPR64sp:$scratch),
    1141:                               (ins GPR64:$table, GPR64:$entry, i32imm:$jti), []>,
    1143:  def JumpTableDest8 : Pseudo<(outs GPR64:$dst, GPR64sp:$scratch),
    1144:                              (ins GPR64:$table, GPR64:$entry, i32imm:$jti), []>,
    1180:  def SPACE : Pseudo<(outs GPR64:$Rd), (ins i32imm:$size, GPR64:$Rn),
    [all …]
|
| AArch64SVEInstrInfo.td |
    1272:  def : Pat<(Ty (Load (SVEDup0Undef), nxv2i1:$gp, GPR64:$base, nxv2i64:$offs)),
    1273:            (!cast<Instruction>(Inst # _SCALED) PPR:$gp, GPR64:$base, ZPR:$offs)>;
    1275:  …def : Pat<(Ty (Load (SVEDup0Undef), nxv2i1:$gp, GPR64:$base, (sext_inreg nxv2i64:$offs, nxv2i32))),
    1276:            (!cast<Instruction>(Inst # _SXTW_SCALED) PPR:$gp, GPR64:$base, ZPR:$offs)>;
    1278:  …def : Pat<(Ty (Load (SVEDup0Undef), nxv2i1:$gp, GPR64:$base, (and nxv2i64:$offs, (nxv2i64 (splat_v…
    1279:            (!cast<Instruction>(Inst # _UXTW_SCALED) PPR:$gp, GPR64:$base, ZPR:$offs)>;
    1287:  def : Pat<(Ty (Load (SVEDup0Undef), nxv2i1:$gp, GPR64:$base, nxv2i64:$offs)),
    1288:            (!cast<Instruction>(Inst) PPR:$gp, GPR64:$base, ZPR:$offs)>;
    1290:  …def : Pat<(Ty (Load (SVEDup0Undef), nxv2i1:$gp, GPR64:$base, (sext_inreg nxv2i64:$offs, nxv2i32))),
    1291:            (!cast<Instruction>(Inst # _SXTW) PPR:$gp, GPR64:$base, ZPR:$offs)>;
    [all …]
|
| AArch64InstrFormats.td |
     292:  def GPR64as32 : RegisterOperand<GPR64, "printGPR64as32"> {
    1161:  def arith_shifted_reg64 : arith_shifted_reg<i64, GPR64, 64>;
    1191:  def logical_shifted_reg64 : logical_shifted_reg<i64, GPR64, logical_shift64>;
    1688:        (outs GPR64:$Rt), (ins), asm, "\t$Rt", pattern> {
    1697:    : RtSystemI<0, (outs), (ins GPR64:$Rt), asm, "\t$Rt", pattern> {
    1828:  class MRSI : RtSystemI<1, (outs GPR64:$Rt), (ins mrs_sysreg_op:$systemreg),
    1843:  class MSRI : RtSystemI<0, (outs), (ins msr_sysreg_op:$systemreg, GPR64:$Rt),
    1936:        (ins imm0_7:$op1, sys_cr_op:$Cn, sys_cr_op:$Cm, imm0_7:$op2, GPR64:$Rt),
    1951:        (ins GPR64:$Rt, imm0_7:$op1, sys_cr_op:$Cn, sys_cr_op:$Cm, imm0_7:$op2),
    1998:    : BaseBranchReg<opc, (outs), (ins GPR64:$Rn), asm, "\t$Rn", pattern> {
    [all …]
|
| AArch64RegisterInfo.td |
    154:  // GPR64/GPR64sp for use by the coalescer.
    171:  def GPR64 : RegisterClass<"AArch64", [i64], 64, (add GPR64common, XZR)> {
    172:    let AltOrders = [(rotl GPR64, 8)];
    203:  // GPR32/GPR64 but with zero-register substitution enabled.
    204:  // TODO: Roll this out to GPR32/GPR64/GPR32all/GPR64all.
    208:  def GPR64z : RegisterOperand<GPR64> {
    242:  def GPR64noip : RegisterClass<"AArch64", [i64], 64, (sub GPR64, X16, X17, LR)> {
    254:  def GPR64pi1 : RegisterOperand<GPR64, "printPostIncOperand<1>">;
    255:  def GPR64pi2 : RegisterOperand<GPR64, "printPostIncOperand<2>">;
    256:  def GPR64pi3 : RegisterOperand<GPR64, "printPostIncOperand<3>">;
    [all …]
|
| SVEInstrFormats.td |
    922:  def : Pat<(i64 (op GPR64:$Rn, (nxv16i1 PPRAny:$Pg))),
    924:  def : Pat<(i64 (op GPR64:$Rn, (nxv8i1 PPRAny:$Pg))),
    926:  def : Pat<(i64 (op GPR64:$Rn, (nxv4i1 PPRAny:$Pg))),
    928:  def : Pat<(i64 (op GPR64:$Rn, (nxv2i1 PPRAny:$Pg))),
    932:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv16i1 (SVEAllActive)), (nxv1…
    934:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv8i1 (SVEAllActive)), (nxv8i…
    936:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv4i1 (SVEAllActive)), (nxv4i…
    938:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv2i1 (SVEAllActive)), (nxv2i…
    942:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv16i1 PPRAny:$pred), (nxv16i…
    944:  …def : Pat<(i64 (combine_op GPR64:$Rn, (int_aarch64_sve_cntp_oneuse (nxv8i1 PPRAny:$pred), (nxv8i1 …
    [all …]
|
| AArch64SMEInstrInfo.td |
     44:  def AllocateZABuffer : Pseudo<(outs GPR64sp:$dst), (ins GPR64:$size), []>, Sched<[WriteI]> {}
     46:  def : Pat<(i64 (AArch64AllocateZABuffer GPR64:$size)),
     52:  …def InitTPIDR2Obj : Pseudo<(outs), (ins GPR64:$buffer), [(AArch64InitTPIDR2Obj GPR64:$buffer)]>, S…
    179:        (ins GPR64:$tpidr2_el0, GPR64sp:$tpidr2obj, i64imm:$restore_routine, variable_ops), []>,
    183:            (i64 GPR64:$tpidr2_el0), (i64 GPR64sp:$tpidr2obj), (i64 texternalsym:$restore_routine)),
    184:        (RestoreZAPseudo GPR64:$tpidr2_el0, GPR64sp:$tpidr2obj, texternalsym:$restore_routine)>;
    188:        (MSR 0xde85, GPR64:$val)>;
|
| SMEInstrFormats.td |
     580:  def : Pat<(Load PPR3bAny:$pg, (addr GPR64sp:$base, GPR64:$offset),
     589:        i32imm:$imm, PPR3bAny:$pg, GPR64sp:$base, GPR64:$offset), []>,
     727:  def : Pat<(Store PPR3bAny:$pg, (addr GPR64sp:$base, GPR64:$offset),
    3245:    : I<(outs GPR64:$Rt), (ins ZTR:$ZTt, uimm3s8:$imm3),
    3257:    : I<(outs ZTR:$ZTt), (ins uimm3s8:$imm3, GPR64:$Rt),
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/Mips/ |
| Mips64InstrInfo.td |
    78:  def ATOMIC_LOAD_ADD_I64 : Atomic2Ops<atomic_load_add_i64, GPR64>;
    79:  def ATOMIC_LOAD_SUB_I64 : Atomic2Ops<atomic_load_sub_i64, GPR64>;
    80:  def ATOMIC_LOAD_AND_I64 : Atomic2Ops<atomic_load_and_i64, GPR64>;
    81:  def ATOMIC_LOAD_OR_I64 : Atomic2Ops<atomic_load_or_i64, GPR64>;
    82:  def ATOMIC_LOAD_XOR_I64 : Atomic2Ops<atomic_load_xor_i64, GPR64>;
    83:  def ATOMIC_LOAD_NAND_I64 : Atomic2Ops<atomic_load_nand_i64, GPR64>;
    84:  def ATOMIC_SWAP_I64 : Atomic2Ops<atomic_swap_i64, GPR64>;
    85:  def ATOMIC_CMP_SWAP_I64 : AtomicCmpSwap<atomic_cmp_swap_i64, GPR64>;
    86:  def ATOMIC_LOAD_MIN_I64 : Atomic2Ops<atomic_load_min_i64, GPR64>;
    87:  def ATOMIC_LOAD_MAX_I64 : Atomic2Ops<atomic_load_max_i64, GPR64>;
    [all …]
|
| MipsCondMov.td |
    204:  defm : MovzPats0<GPR32, GPR64, MOVZ_I_I64, SLT, SLTu, SLTi, SLTiu>,
    206:  defm : MovzPats0<GPR64, GPR32, MOVZ_I_I, SLT64, SLTu64, SLTi64, SLTiu64>,
    208:  defm : MovzPats0<GPR64, GPR64, MOVZ_I_I64, SLT64, SLTu64, SLTi64, SLTiu64>,
    210:  defm : MovzPats1<GPR32, GPR64, MOVZ_I_I64, XOR>,
    212:  defm : MovzPats1<GPR64, GPR32, MOVZ_I64_I, XOR64>,
    214:  defm : MovzPats1<GPR64, GPR64, MOVZ_I64_I64, XOR64>,
    216:  defm : MovzPats2<GPR32, GPR64, MOVZ_I_I64, XORi>,
    218:  defm : MovzPats2<GPR64, GPR32, MOVZ_I64_I, XORi64>,
    220:  defm : MovzPats2<GPR64, GPR64, MOVZ_I64_I64, XORi64>,
    225:  defm : MovnPats<GPR32, GPR64, MOVN_I_I64, XOR>, INSN_MIPS4_32_NOT_32R6_64R6,
    [all …]
|
| MipsRegisterInfo.td |
    354:  def GPR64 : RegisterClass<"Mips", [i64], 64, (add
    648:  def GPR64Opnd : RegisterOperand<GPR64> {
|
| MipsInstrInfo.td |
    2334:  def MIPSeh_return64 : MipsPseudo<(outs), (ins GPR64:$spoff, GPR64:$dst),
    2335:                                   [(MIPSehret GPR64:$spoff, GPR64:$dst)]>;
|
| MipsMSAInstrInfo.td |
    3913:        GPR64), [HasMSA, IsGP64bit]>;
    3934:        GPR64), [HasMSA, IsGP64bit]>;
    3982:        GPR64), [HasMSA, IsGP64bit]>;
    4019:        GPR64),
|
| /freebsd/contrib/llvm-project/lldb/source/Plugins/ABI/X86/ |
| ABIX86.cpp |
    168:  #define GPR64(n) \    [macro]
    200:  GPR64(8), GPR64(9), GPR64(10), GPR64(11),      [in makeBaseRegMap()]
    201:  GPR64(12), GPR64(13), GPR64(14), GPR64(15),    [in makeBaseRegMap()]
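The ABIX86.cpp hits show a macro that is expanded once per numbered 64-bit register inside makeBaseRegMap(), so each GPR gets a table entry without hand-writing eight near-identical lines. Below is a minimal, self-contained sketch of that stamp-out idiom; the RegEntry struct, the field layout, and kBaseRegs are hypothetical stand-ins for illustration, not lldb's actual types.

    // Sketch of the GPR64(n)-style macro idiom: one macro invocation per
    // numbered 64-bit register produces one table entry.
    #include <cstdint>
    #include <cstdio>

    struct RegEntry {
      const char *name;   // e.g. "r8" (hypothetical field)
      uint32_t bit_size;  // 64 for a GPR64 (hypothetical field)
    };

    // "r" #n stringizes the register number and concatenates it with "r".
    #define GPR64(n) { "r" #n, 64 }

    static const RegEntry kBaseRegs[] = {
        GPR64(8),  GPR64(9),  GPR64(10), GPR64(11),
        GPR64(12), GPR64(13), GPR64(14), GPR64(15),
    };

    #undef GPR64

    int main() {
      for (const RegEntry &r : kBaseRegs)
        std::printf("%s: %u bits\n", r.name, r.bit_size);
      return 0;
    }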
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/SystemZ/ |
| SystemZRegisterInfo.td |
    67:  class GPR64<bits<16> num, string n, GPR32 low, GPR32 high>
    75:  class GPR128<bits<16> num, string n, GPR64 low, GPR64 high>
    86:  def R#I#D : GPR64<I, "r"#I, !cast<GPR32>("R"#I#"L"), !cast<GPR32>("R"#I#"H")>,
    91:  def R#I#Q : GPR128<I, "r"#I, !cast<GPR64>("R"#!add(I, 1)#"D"),
    92:                               !cast<GPR64>("R"#I#"D")>;
|
| SystemZFrameLowering.cpp |
    302:      unsigned GPR64, bool IsImplicit) {    [argument in addSavedGPR()]
    305:  Register GPR32 = RI->getSubReg(GPR64, SystemZ::subreg_l32);    [in addSavedGPR()]
    306:  bool IsLive = MBB.isLiveIn(GPR64) || MBB.isLiveIn(GPR32);    [in addSavedGPR()]
    308:  MIB.addReg(GPR64, getImplRegState(IsImplicit) | getKillRegState(!IsLive));    [in addSavedGPR()]
    310:  MBB.addLiveIn(GPR64);    [in addSavedGPR()]
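These addSavedGPR() hits show the liveness check around appending a saved GPR64 to a save instruction: the register counts as live if either the 64-bit register or its low 32-bit subregister is already a block live-in, the kill flag is set only when it is not live, and the register is then recorded as a live-in. The following is a self-contained model of just that decision, under the assumption that this is the intended control flow; Block, Register, and addSavedGPR here are simplified stand-ins, not the LLVM classes.

    // Model of the kill/live-in decision visible in the hits above.
    #include <cstdint>
    #include <set>

    using Register = uint32_t;

    struct Block {
      std::set<Register> live_ins;
      bool isLiveIn(Register r) const { return live_ins.count(r) != 0; }
      void addLiveIn(Register r) { live_ins.insert(r); }
    };

    // Returns true if the saved 64-bit register may carry a kill flag.
    bool addSavedGPR(Block &mbb, Register gpr64, Register gpr32_subreg) {
      // Live if either the full register or its low 32-bit half is live-in.
      bool is_live = mbb.isLiveIn(gpr64) || mbb.isLiveIn(gpr32_subreg);
      if (is_live)
        return false;        // already live into the block: no kill flag
      mbb.addLiveIn(gpr64);  // the save reads it, so record it as a live-in
      return true;           // not otherwise live: the save is its last use
    }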
|
| /freebsd/contrib/llvm-project/lldb/source/Plugins/Process/Utility/ |
| RegisterContextFreeBSD_powerpc.cpp |
    55:   } GPR64;    [typedef]
    221:  return sizeof(GPR64);    [in GetGPRSize()]
|
| RegisterInfos_powerpc.h |
    188:  #define GPR GPR64
    200:  #define GPR GPR64
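The RegisterInfos_powerpc.h hits define GPR as GPR64 before register-info tables are emitted, so the same offset computations can be pointed at the 64-bit register struct. A minimal, self-contained sketch of that define/offsetof/undef idiom follows; GPR64_ppc, its fields, and GPR_OFFSET are hypothetical stand-ins, not the actual lldb definitions.

    // Sketch of the "#define GPR GPR64" idiom: offsets are computed against
    // whichever register struct the GPR macro currently names.
    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct GPR64_ppc {
      uint64_t r0;
      uint64_t r1;
      uint64_t pc;
    };

    #define GPR GPR64_ppc
    #define GPR_OFFSET(field) (offsetof(GPR, field))

    static const size_t kR1Offset = GPR_OFFSET(r1);
    static const size_t kPCOffset = GPR_OFFSET(pc);

    #undef GPR_OFFSET
    #undef GPR

    int main() {
      std::printf("r1 at %zu, pc at %zu, context size %zu\n",
                  kR1Offset, kPCOffset, sizeof(GPR64_ppc));
      return 0;
    }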
|
| /freebsd/contrib/llvm-project/llvm/lib/Target/X86/ |
| X86CallingConv.td |
    131:  // __mmask64 (v64i1) --> GPR64 (for x64) or 2 x GPR32 (for IA32)
    210:  // __mmask64 (v64i1) --> GPR64 (for x64) or 2 x GPR32 (for IA32)
|