Lines matching full:aarch64 in llvm/lib/Target/AArch64/AArch64InstrInfo.cpp

1 //===- AArch64InstrInfo.cpp - AArch64 Instruction Information -------------===//
9 // This file contains the AArch64 implementation of the TargetInstrInfo class.
67 "aarch64-tbz-offset-bits", cl::Hidden, cl::init(14),
71 "aarch64-cbz-offset-bits", cl::Hidden, cl::init(19),
75 BCCDisplacementBits("aarch64-bcc-offset-bits", cl::Hidden, cl::init(19),
79 BDisplacementBits("aarch64-b-offset-bits", cl::Hidden, cl::init(26),
83 : AArch64GenInstrInfo(AArch64::ADJCALLSTACKDOWN, AArch64::ADJCALLSTACKUP, in AArch64InstrInfo()
84 AArch64::CATCHRET), in AArch64InstrInfo()
97 if (Op == AArch64::INLINEASM || Op == AArch64::INLINEASM_BR) in getInstSizeInBytes()
111 // llvm/lib/Target/AArch64/AArch64InstrInfo.td (default case). in getInstSizeInBytes()
157 case AArch64::SPACE: in getInstSizeInBytes()
185 case AArch64::Bcc: in parseCondBranch()
189 case AArch64::CBZW: in parseCondBranch()
190 case AArch64::CBZX: in parseCondBranch()
191 case AArch64::CBNZW: in parseCondBranch()
192 case AArch64::CBNZX: in parseCondBranch()
198 case AArch64::TBZW: in parseCondBranch()
199 case AArch64::TBZX: in parseCondBranch()
200 case AArch64::TBNZW: in parseCondBranch()
201 case AArch64::TBNZX: in parseCondBranch()
214 case AArch64::B: in getBranchDisplacementBits()
216 case AArch64::TBNZW: in getBranchDisplacementBits()
217 case AArch64::TBZW: in getBranchDisplacementBits()
218 case AArch64::TBNZX: in getBranchDisplacementBits()
219 case AArch64::TBZX: in getBranchDisplacementBits()
221 case AArch64::CBNZW: in getBranchDisplacementBits()
222 case AArch64::CBZW: in getBranchDisplacementBits()
223 case AArch64::CBNZX: in getBranchDisplacementBits()
224 case AArch64::CBZX: in getBranchDisplacementBits()
226 case AArch64::Bcc: in getBranchDisplacementBits()
244 case AArch64::B: in getBranchDestBlock()
246 case AArch64::TBZW: in getBranchDestBlock()
247 case AArch64::TBNZW: in getBranchDestBlock()
248 case AArch64::TBZX: in getBranchDestBlock()
249 case AArch64::TBNZX: in getBranchDestBlock()
251 case AArch64::CBZW: in getBranchDestBlock()
252 case AArch64::CBNZW: in getBranchDestBlock()
253 case AArch64::CBZX: in getBranchDestBlock()
254 case AArch64::CBNZX: in getBranchDestBlock()
255 case AArch64::Bcc: in getBranchDestBlock()
280 BuildMI(MBB, MBB.end(), DL, get(AArch64::ADRP), Reg) in insertIndirectBranch()
282 BuildMI(MBB, MBB.end(), DL, get(AArch64::ADDXri), Reg) in insertIndirectBranch()
286 BuildMI(MBB, MBB.end(), DL, get(AArch64::BR)).addReg(Reg); in insertIndirectBranch()
292 constexpr Register Reg = AArch64::X16; in insertIndirectBranch()
301 Register Scavenged = RS->FindUnusedReg(&AArch64::GPR64RegClass); in insertIndirectBranch()
302 if (Scavenged != AArch64::NoRegister && in insertIndirectBranch()
317 BuildMI(MBB, MBB.end(), DL, get(AArch64::STRXpre)) in insertIndirectBranch()
318 .addReg(AArch64::SP, RegState::Define) in insertIndirectBranch()
320 .addReg(AArch64::SP) in insertIndirectBranch()
323 BuildMI(MBB, MBB.end(), DL, get(AArch64::B)).addMBB(&RestoreBB); in insertIndirectBranch()
325 BuildMI(RestoreBB, RestoreBB.end(), DL, get(AArch64::LDRXpost)) in insertIndirectBranch()
326 .addReg(AArch64::SP, RegState::Define) in insertIndirectBranch()
328 .addReg(AArch64::SP) in insertIndirectBranch()
344 if (I->getOpcode() == AArch64::SpeculationBarrierISBDSBEndBB || in analyzeBranch()
345 I->getOpcode() == AArch64::SpeculationBarrierSBEndBB) { in analyzeBranch()
461 if (I->getOpcode() == AArch64::SpeculationBarrierISBDSBEndBB || in analyzeBranchPredicate()
462 I->getOpcode() == AArch64::SpeculationBarrierSBEndBB) { in analyzeBranchPredicate()
478 case AArch64::CBZW: in analyzeBranchPredicate()
479 case AArch64::CBZX: in analyzeBranchPredicate()
480 case AArch64::CBNZW: in analyzeBranchPredicate()
481 case AArch64::CBNZX: in analyzeBranchPredicate()
494 MBP.Predicate = LastOpc == AArch64::CBNZX ? MachineBranchPredicate::PRED_NE in analyzeBranchPredicate()
510 case AArch64::CBZW: in reverseBranchCondition()
511 Cond[1].setImm(AArch64::CBNZW); in reverseBranchCondition()
513 case AArch64::CBNZW: in reverseBranchCondition()
514 Cond[1].setImm(AArch64::CBZW); in reverseBranchCondition()
516 case AArch64::CBZX: in reverseBranchCondition()
517 Cond[1].setImm(AArch64::CBNZX); in reverseBranchCondition()
519 case AArch64::CBNZX: in reverseBranchCondition()
520 Cond[1].setImm(AArch64::CBZX); in reverseBranchCondition()
522 case AArch64::TBZW: in reverseBranchCondition()
523 Cond[1].setImm(AArch64::TBNZW); in reverseBranchCondition()
525 case AArch64::TBNZW: in reverseBranchCondition()
526 Cond[1].setImm(AArch64::TBZW); in reverseBranchCondition()
528 case AArch64::TBZX: in reverseBranchCondition()
529 Cond[1].setImm(AArch64::TBNZX); in reverseBranchCondition()
531 case AArch64::TBNZX: in reverseBranchCondition()
532 Cond[1].setImm(AArch64::TBZX); in reverseBranchCondition()
580 BuildMI(&MBB, DL, get(AArch64::Bcc)).addImm(Cond[0].getImm()).addMBB(TBB); in instantiateCondBranch()
600 BuildMI(&MBB, DL, get(AArch64::B)).addMBB(TBB); in insertBranch()
612 BuildMI(&MBB, DL, get(AArch64::B)).addMBB(FBB); in insertBranch()
640 bool Is64Bit = AArch64::GPR64allRegClass.hasSubClassEq(MRI.getRegClass(VReg)); in canFoldIntoCSel()
645 case AArch64::ADDSXri: in canFoldIntoCSel()
646 case AArch64::ADDSWri: in canFoldIntoCSel()
648 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, in canFoldIntoCSel()
653 case AArch64::ADDXri: in canFoldIntoCSel()
654 case AArch64::ADDWri: in canFoldIntoCSel()
660 Opc = Is64Bit ? AArch64::CSINCXr : AArch64::CSINCWr; in canFoldIntoCSel()
663 case AArch64::ORNXrr: in canFoldIntoCSel()
664 case AArch64::ORNWrr: { in canFoldIntoCSel()
667 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR) in canFoldIntoCSel()
670 Opc = Is64Bit ? AArch64::CSINVXr : AArch64::CSINVWr; in canFoldIntoCSel()
674 case AArch64::SUBSXrr: in canFoldIntoCSel()
675 case AArch64::SUBSWrr: in canFoldIntoCSel()
677 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, in canFoldIntoCSel()
682 case AArch64::SUBXrr: in canFoldIntoCSel()
683 case AArch64::SUBWrr: { in canFoldIntoCSel()
686 if (ZReg != AArch64::XZR && ZReg != AArch64::WZR) in canFoldIntoCSel()
689 Opc = Is64Bit ? AArch64::CSNEGXr : AArch64::CSNEGWr; in canFoldIntoCSel()
726 if (AArch64::GPR64allRegClass.hasSubClassEq(RC) || in canInsertSelect()
727 AArch64::GPR32allRegClass.hasSubClassEq(RC)) { in canInsertSelect()
740 if (AArch64::FPR64RegClass.hasSubClassEq(RC) || in canInsertSelect()
741 AArch64::FPR32RegClass.hasSubClassEq(RC)) { in canInsertSelect()
772 case AArch64::CBZW: in insertSelect()
776 case AArch64::CBZX: in insertSelect()
780 case AArch64::CBNZW: in insertSelect()
784 case AArch64::CBNZX: in insertSelect()
792 MRI.constrainRegClass(SrcReg, &AArch64::GPR64spRegClass); in insertSelect()
793 BuildMI(MBB, I, DL, get(AArch64::SUBSXri), AArch64::XZR) in insertSelect()
798 MRI.constrainRegClass(SrcReg, &AArch64::GPR32spRegClass); in insertSelect()
799 BuildMI(MBB, I, DL, get(AArch64::SUBSWri), AArch64::WZR) in insertSelect()
811 case AArch64::TBZW: in insertSelect()
812 case AArch64::TBZX: in insertSelect()
815 case AArch64::TBNZW: in insertSelect()
816 case AArch64::TBNZX: in insertSelect()
821 if (Cond[1].getImm() == AArch64::TBZW || Cond[1].getImm() == AArch64::TBNZW) in insertSelect()
822 BuildMI(MBB, I, DL, get(AArch64::ANDSWri), AArch64::WZR) in insertSelect()
827 BuildMI(MBB, I, DL, get(AArch64::ANDSXri), AArch64::XZR) in insertSelect()
838 if (MRI.constrainRegClass(DstReg, &AArch64::GPR64RegClass)) { in insertSelect()
839 RC = &AArch64::GPR64RegClass; in insertSelect()
840 Opc = AArch64::CSELXr; in insertSelect()
842 } else if (MRI.constrainRegClass(DstReg, &AArch64::GPR32RegClass)) { in insertSelect()
843 RC = &AArch64::GPR32RegClass; in insertSelect()
844 Opc = AArch64::CSELWr; in insertSelect()
846 } else if (MRI.constrainRegClass(DstReg, &AArch64::FPR64RegClass)) { in insertSelect()
847 RC = &AArch64::FPR64RegClass; in insertSelect()
848 Opc = AArch64::FCSELDrrr; in insertSelect()
849 } else if (MRI.constrainRegClass(DstReg, &AArch64::FPR32RegClass)) { in insertSelect()
850 RC = &AArch64::FPR32RegClass; in insertSelect()
851 Opc = AArch64::FCSELSrrr; in insertSelect()
914 case AArch64::ADDWrs: in isAsCheapAsAMove()
915 case AArch64::ADDXrs: in isAsCheapAsAMove()
916 case AArch64::SUBWrs: in isAsCheapAsAMove()
917 case AArch64::SUBXrs: in isAsCheapAsAMove()
923 case AArch64::MOVi32imm: in isAsCheapAsAMove()
925 case AArch64::MOVi64imm: in isAsCheapAsAMove()
935 case AArch64::ADDWrs: in isFalkorShiftExtFast()
936 case AArch64::ADDXrs: in isFalkorShiftExtFast()
937 case AArch64::ADDSWrs: in isFalkorShiftExtFast()
938 case AArch64::ADDSXrs: { in isFalkorShiftExtFast()
946 case AArch64::ADDWrx: in isFalkorShiftExtFast()
947 case AArch64::ADDXrx: in isFalkorShiftExtFast()
948 case AArch64::ADDXrx64: in isFalkorShiftExtFast()
949 case AArch64::ADDSWrx: in isFalkorShiftExtFast()
950 case AArch64::ADDSXrx: in isFalkorShiftExtFast()
951 case AArch64::ADDSXrx64: { in isFalkorShiftExtFast()
964 case AArch64::SUBWrs: in isFalkorShiftExtFast()
965 case AArch64::SUBSWrs: { in isFalkorShiftExtFast()
972 case AArch64::SUBXrs: in isFalkorShiftExtFast()
973 case AArch64::SUBSXrs: { in isFalkorShiftExtFast()
980 case AArch64::SUBWrx: in isFalkorShiftExtFast()
981 case AArch64::SUBXrx: in isFalkorShiftExtFast()
982 case AArch64::SUBXrx64: in isFalkorShiftExtFast()
983 case AArch64::SUBSWrx: in isFalkorShiftExtFast()
984 case AArch64::SUBSXrx: in isFalkorShiftExtFast()
985 case AArch64::SUBSXrx64: { in isFalkorShiftExtFast()
998 case AArch64::LDRBBroW: in isFalkorShiftExtFast()
999 case AArch64::LDRBBroX: in isFalkorShiftExtFast()
1000 case AArch64::LDRBroW: in isFalkorShiftExtFast()
1001 case AArch64::LDRBroX: in isFalkorShiftExtFast()
1002 case AArch64::LDRDroW: in isFalkorShiftExtFast()
1003 case AArch64::LDRDroX: in isFalkorShiftExtFast()
1004 case AArch64::LDRHHroW: in isFalkorShiftExtFast()
1005 case AArch64::LDRHHroX: in isFalkorShiftExtFast()
1006 case AArch64::LDRHroW: in isFalkorShiftExtFast()
1007 case AArch64::LDRHroX: in isFalkorShiftExtFast()
1008 case AArch64::LDRQroW: in isFalkorShiftExtFast()
1009 case AArch64::LDRQroX: in isFalkorShiftExtFast()
1010 case AArch64::LDRSBWroW: in isFalkorShiftExtFast()
1011 case AArch64::LDRSBWroX: in isFalkorShiftExtFast()
1012 case AArch64::LDRSBXroW: in isFalkorShiftExtFast()
1013 case AArch64::LDRSBXroX: in isFalkorShiftExtFast()
1014 case AArch64::LDRSHWroW: in isFalkorShiftExtFast()
1015 case AArch64::LDRSHWroX: in isFalkorShiftExtFast()
1016 case AArch64::LDRSHXroW: in isFalkorShiftExtFast()
1017 case AArch64::LDRSHXroX: in isFalkorShiftExtFast()
1018 case AArch64::LDRSWroW: in isFalkorShiftExtFast()
1019 case AArch64::LDRSWroX: in isFalkorShiftExtFast()
1020 case AArch64::LDRSroW: in isFalkorShiftExtFast()
1021 case AArch64::LDRSroX: in isFalkorShiftExtFast()
1022 case AArch64::LDRWroW: in isFalkorShiftExtFast()
1023 case AArch64::LDRWroX: in isFalkorShiftExtFast()
1024 case AArch64::LDRXroW: in isFalkorShiftExtFast()
1025 case AArch64::LDRXroX: in isFalkorShiftExtFast()
1026 case AArch64::PRFMroW: in isFalkorShiftExtFast()
1027 case AArch64::PRFMroX: in isFalkorShiftExtFast()
1028 case AArch64::STRBBroW: in isFalkorShiftExtFast()
1029 case AArch64::STRBBroX: in isFalkorShiftExtFast()
1030 case AArch64::STRBroW: in isFalkorShiftExtFast()
1031 case AArch64::STRBroX: in isFalkorShiftExtFast()
1032 case AArch64::STRDroW: in isFalkorShiftExtFast()
1033 case AArch64::STRDroX: in isFalkorShiftExtFast()
1034 case AArch64::STRHHroW: in isFalkorShiftExtFast()
1035 case AArch64::STRHHroX: in isFalkorShiftExtFast()
1036 case AArch64::STRHroW: in isFalkorShiftExtFast()
1037 case AArch64::STRHroX: in isFalkorShiftExtFast()
1038 case AArch64::STRQroW: in isFalkorShiftExtFast()
1039 case AArch64::STRQroX: in isFalkorShiftExtFast()
1040 case AArch64::STRSroW: in isFalkorShiftExtFast()
1041 case AArch64::STRSroX: in isFalkorShiftExtFast()
1042 case AArch64::STRWroW: in isFalkorShiftExtFast()
1043 case AArch64::STRWroX: in isFalkorShiftExtFast()
1044 case AArch64::STRXroW: in isFalkorShiftExtFast()
1045 case AArch64::STRXroX: { in isFalkorShiftExtFast()
1057 case AArch64::SEH_StackAlloc: in isSEHInstruction()
1058 case AArch64::SEH_SaveFPLR: in isSEHInstruction()
1059 case AArch64::SEH_SaveFPLR_X: in isSEHInstruction()
1060 case AArch64::SEH_SaveReg: in isSEHInstruction()
1061 case AArch64::SEH_SaveReg_X: in isSEHInstruction()
1062 case AArch64::SEH_SaveRegP: in isSEHInstruction()
1063 case AArch64::SEH_SaveRegP_X: in isSEHInstruction()
1064 case AArch64::SEH_SaveFReg: in isSEHInstruction()
1065 case AArch64::SEH_SaveFReg_X: in isSEHInstruction()
1066 case AArch64::SEH_SaveFRegP: in isSEHInstruction()
1067 case AArch64::SEH_SaveFRegP_X: in isSEHInstruction()
1068 case AArch64::SEH_SetFP: in isSEHInstruction()
1069 case AArch64::SEH_AddFP: in isSEHInstruction()
1070 case AArch64::SEH_Nop: in isSEHInstruction()
1071 case AArch64::SEH_PrologEnd: in isSEHInstruction()
1072 case AArch64::SEH_EpilogStart: in isSEHInstruction()
1073 case AArch64::SEH_EpilogEnd: in isSEHInstruction()
1074 case AArch64::SEH_PACSignLR: in isSEHInstruction()
1075 case AArch64::SEH_SaveAnyRegQP: in isSEHInstruction()
1076 case AArch64::SEH_SaveAnyRegQPX: in isSEHInstruction()
1087 case AArch64::SBFMXri: // aka sxtw in isCoalescableExtInstr()
1088 case AArch64::UBFMXri: // aka uxtw in isCoalescableExtInstr()
1096 SubIdx = AArch64::sub_32; in isCoalescableExtInstr()
1151 case AArch64::HINT: in isSchedulingBoundary()
1156 case AArch64::DSB: in isSchedulingBoundary()
1157 case AArch64::ISB: in isSchedulingBoundary()
1160 case AArch64::MSRpstatesvcrImm1: in isSchedulingBoundary()
1179 assert(MI.getNumOperands() >= 2 && "All AArch64 cmps should have 2 operands"); in analyzeCompare()
1186 case AArch64::PTEST_PP: in analyzeCompare()
1187 case AArch64::PTEST_PP_ANY: in analyzeCompare()
1194 case AArch64::SUBSWrr: in analyzeCompare()
1195 case AArch64::SUBSWrs: in analyzeCompare()
1196 case AArch64::SUBSWrx: in analyzeCompare()
1197 case AArch64::SUBSXrr: in analyzeCompare()
1198 case AArch64::SUBSXrs: in analyzeCompare()
1199 case AArch64::SUBSXrx: in analyzeCompare()
1200 case AArch64::ADDSWrr: in analyzeCompare()
1201 case AArch64::ADDSWrs: in analyzeCompare()
1202 case AArch64::ADDSWrx: in analyzeCompare()
1203 case AArch64::ADDSXrr: in analyzeCompare()
1204 case AArch64::ADDSXrs: in analyzeCompare()
1205 case AArch64::ADDSXrx: in analyzeCompare()
1212 case AArch64::SUBSWri: in analyzeCompare()
1213 case AArch64::ADDSWri: in analyzeCompare()
1214 case AArch64::SUBSXri: in analyzeCompare()
1215 case AArch64::ADDSXri: in analyzeCompare()
1221 case AArch64::ANDSWri: in analyzeCompare()
1222 case AArch64::ANDSXri: in analyzeCompare()
1230 MI.getOpcode() == AArch64::ANDSWri ? 32 : 64); in analyzeCompare()
1282 if (MI.definesRegister(AArch64::WZR, /*TRI=*/nullptr) || in convertToNonFlagSettingOpc()
1283 MI.definesRegister(AArch64::XZR, /*TRI=*/nullptr)) in convertToNonFlagSettingOpc()
1289 case AArch64::ADDSWrr: in convertToNonFlagSettingOpc()
1290 return AArch64::ADDWrr; in convertToNonFlagSettingOpc()
1291 case AArch64::ADDSWri: in convertToNonFlagSettingOpc()
1292 return MIDefinesZeroReg ? AArch64::ADDSWri : AArch64::ADDWri; in convertToNonFlagSettingOpc()
1293 case AArch64::ADDSWrs: in convertToNonFlagSettingOpc()
1294 return MIDefinesZeroReg ? AArch64::ADDSWrs : AArch64::ADDWrs; in convertToNonFlagSettingOpc()
1295 case AArch64::ADDSWrx: in convertToNonFlagSettingOpc()
1296 return AArch64::ADDWrx; in convertToNonFlagSettingOpc()
1297 case AArch64::ADDSXrr: in convertToNonFlagSettingOpc()
1298 return AArch64::ADDXrr; in convertToNonFlagSettingOpc()
1299 case AArch64::ADDSXri: in convertToNonFlagSettingOpc()
1300 return MIDefinesZeroReg ? AArch64::ADDSXri : AArch64::ADDXri; in convertToNonFlagSettingOpc()
1301 case AArch64::ADDSXrs: in convertToNonFlagSettingOpc()
1302 return MIDefinesZeroReg ? AArch64::ADDSXrs : AArch64::ADDXrs; in convertToNonFlagSettingOpc()
1303 case AArch64::ADDSXrx: in convertToNonFlagSettingOpc()
1304 return AArch64::ADDXrx; in convertToNonFlagSettingOpc()
1305 case AArch64::SUBSWrr: in convertToNonFlagSettingOpc()
1306 return AArch64::SUBWrr; in convertToNonFlagSettingOpc()
1307 case AArch64::SUBSWri: in convertToNonFlagSettingOpc()
1308 return MIDefinesZeroReg ? AArch64::SUBSWri : AArch64::SUBWri; in convertToNonFlagSettingOpc()
1309 case AArch64::SUBSWrs: in convertToNonFlagSettingOpc()
1310 return MIDefinesZeroReg ? AArch64::SUBSWrs : AArch64::SUBWrs; in convertToNonFlagSettingOpc()
1311 case AArch64::SUBSWrx: in convertToNonFlagSettingOpc()
1312 return AArch64::SUBWrx; in convertToNonFlagSettingOpc()
1313 case AArch64::SUBSXrr: in convertToNonFlagSettingOpc()
1314 return AArch64::SUBXrr; in convertToNonFlagSettingOpc()
1315 case AArch64::SUBSXri: in convertToNonFlagSettingOpc()
1316 return MIDefinesZeroReg ? AArch64::SUBSXri : AArch64::SUBXri; in convertToNonFlagSettingOpc()
1317 case AArch64::SUBSXrs: in convertToNonFlagSettingOpc()
1318 return MIDefinesZeroReg ? AArch64::SUBSXrs : AArch64::SUBXrs; in convertToNonFlagSettingOpc()
1319 case AArch64::SUBSXrx: in convertToNonFlagSettingOpc()
1320 return AArch64::SUBXrx; in convertToNonFlagSettingOpc()
1352 Instr.modifiesRegister(AArch64::NZCV, TRI)) || in areCFlagsAccessedBetweenInstrs()
1353 ((AccessToCheck & AK_Read) && Instr.readsRegister(AArch64::NZCV, TRI))) in areCFlagsAccessedBetweenInstrs()
1372 if ((Mask == Pred) && PTest->getOpcode() == AArch64::PTEST_PP_ANY) in canRemovePTestInstr()
1391 if ((Mask == Pred) && PTest->getOpcode() == AArch64::PTEST_PP_ANY) in canRemovePTestInstr()
1401 if (Mask == PTestLikeMask || PTest->getOpcode() == AArch64::PTEST_PP_ANY) in canRemovePTestInstr()
1429 if (Mask == PTestLikeMask && (PredElementSize == AArch64::ElementSizeB || in canRemovePTestInstr()
1430 PTest->getOpcode() == AArch64::PTEST_PP_ANY)) in canRemovePTestInstr()
1439 case AArch64::AND_PPzPP: in canRemovePTestInstr()
1440 case AArch64::BIC_PPzPP: in canRemovePTestInstr()
1441 case AArch64::EOR_PPzPP: in canRemovePTestInstr()
1442 case AArch64::NAND_PPzPP: in canRemovePTestInstr()
1443 case AArch64::NOR_PPzPP: in canRemovePTestInstr()
1444 case AArch64::ORN_PPzPP: in canRemovePTestInstr()
1445 case AArch64::ORR_PPzPP: in canRemovePTestInstr()
1446 case AArch64::BRKA_PPzP: in canRemovePTestInstr()
1447 case AArch64::BRKPA_PPzPP: in canRemovePTestInstr()
1448 case AArch64::BRKB_PPzP: in canRemovePTestInstr()
1449 case AArch64::BRKPB_PPzPP: in canRemovePTestInstr()
1450 case AArch64::RDFFR_PPz: { in canRemovePTestInstr()
1458 case AArch64::BRKN_PPzP: { in canRemovePTestInstr()
1462 if ((MaskOpcode != AArch64::PTRUE_B) || in canRemovePTestInstr()
1467 case AArch64::PTRUE_B: in canRemovePTestInstr()
1507 Pred->addRegisterDefined(AArch64::NZCV, TRI); in optimizePTestInstr()
1511 if (Pred->registerDefIsDead(AArch64::NZCV, TRI)) { in optimizePTestInstr()
1515 if (MO.isReg() && MO.isDef() && MO.getReg() == AArch64::NZCV) { in optimizePTestInstr()
1525 /// instruction which produces AArch64::NZCV. It can be truly compare
1543 CmpInstr.findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, true); in optimizeCompareInstr()
1545 if (CmpInstr.definesRegister(AArch64::WZR, /*TRI=*/nullptr) || in optimizeCompareInstr()
1546 CmpInstr.definesRegister(AArch64::XZR, /*TRI=*/nullptr)) { in optimizeCompareInstr()
1563 if (CmpInstr.getOpcode() == AArch64::PTEST_PP || in optimizeCompareInstr()
1564 CmpInstr.getOpcode() == AArch64::PTEST_PP_ANY) in optimizeCompareInstr()
1582 /// AArch64::INSTRUCTION_LIST_END is returned if Instr does not have S version
1587 return AArch64::INSTRUCTION_LIST_END; in sForm()
1589 case AArch64::ADDSWrr: in sForm()
1590 case AArch64::ADDSWri: in sForm()
1591 case AArch64::ADDSXrr: in sForm()
1592 case AArch64::ADDSXri: in sForm()
1593 case AArch64::SUBSWrr: in sForm()
1594 case AArch64::SUBSWri: in sForm()
1595 case AArch64::SUBSXrr: in sForm()
1596 case AArch64::SUBSXri: in sForm()
1599 case AArch64::ADDWrr: in sForm()
1600 return AArch64::ADDSWrr; in sForm()
1601 case AArch64::ADDWri: in sForm()
1602 return AArch64::ADDSWri; in sForm()
1603 case AArch64::ADDXrr: in sForm()
1604 return AArch64::ADDSXrr; in sForm()
1605 case AArch64::ADDXri: in sForm()
1606 return AArch64::ADDSXri; in sForm()
1607 case AArch64::ADCWr: in sForm()
1608 return AArch64::ADCSWr; in sForm()
1609 case AArch64::ADCXr: in sForm()
1610 return AArch64::ADCSXr; in sForm()
1611 case AArch64::SUBWrr: in sForm()
1612 return AArch64::SUBSWrr; in sForm()
1613 case AArch64::SUBWri: in sForm()
1614 return AArch64::SUBSWri; in sForm()
1615 case AArch64::SUBXrr: in sForm()
1616 return AArch64::SUBSXrr; in sForm()
1617 case AArch64::SUBXri: in sForm()
1618 return AArch64::SUBSXri; in sForm()
1619 case AArch64::SBCWr: in sForm()
1620 return AArch64::SBCSWr; in sForm()
1621 case AArch64::SBCXr: in sForm()
1622 return AArch64::SBCSXr; in sForm()
1623 case AArch64::ANDWri: in sForm()
1624 return AArch64::ANDSWri; in sForm()
1625 case AArch64::ANDXri: in sForm()
1626 return AArch64::ANDSXri; in sForm()
1630 /// Check if AArch64::NZCV should be alive in successors of MBB.
1633 if (BB->isLiveIn(AArch64::NZCV)) in areCFlagsAliveInSuccessors()
1646 case AArch64::Bcc: { in findCondCodeUseOperandIdxForBranchOrSelect()
1647 int Idx = Instr.findRegisterUseOperandIdx(AArch64::NZCV, /*TRI=*/nullptr); in findCondCodeUseOperandIdxForBranchOrSelect()
1652 case AArch64::CSINVWr: in findCondCodeUseOperandIdxForBranchOrSelect()
1653 case AArch64::CSINVXr: in findCondCodeUseOperandIdxForBranchOrSelect()
1654 case AArch64::CSINCWr: in findCondCodeUseOperandIdxForBranchOrSelect()
1655 case AArch64::CSINCXr: in findCondCodeUseOperandIdxForBranchOrSelect()
1656 case AArch64::CSELWr: in findCondCodeUseOperandIdxForBranchOrSelect()
1657 case AArch64::CSELXr: in findCondCodeUseOperandIdxForBranchOrSelect()
1658 case AArch64::CSNEGWr: in findCondCodeUseOperandIdxForBranchOrSelect()
1659 case AArch64::CSNEGXr: in findCondCodeUseOperandIdxForBranchOrSelect()
1660 case AArch64::FCSELSrrr: in findCondCodeUseOperandIdxForBranchOrSelect()
1661 case AArch64::FCSELDrrr: { in findCondCodeUseOperandIdxForBranchOrSelect()
1662 int Idx = Instr.findRegisterUseOperandIdx(AArch64::NZCV, /*TRI=*/nullptr); in findCondCodeUseOperandIdxForBranchOrSelect()
1742 if (Instr.readsRegister(AArch64::NZCV, &TRI)) { in examineCFlagsUse()
1750 if (Instr.modifiesRegister(AArch64::NZCV, &TRI)) in examineCFlagsUse()
1757 return Opcode == AArch64::ADDSWri || Opcode == AArch64::ADDSXri; in isADDSRegImm()
1761 return Opcode == AArch64::SUBSWri || Opcode == AArch64::SUBSXri; in isSUBSRegImm()
1781 assert(sForm(MI) != AArch64::INSTRUCTION_LIST_END); in canInstrSubstituteCmpInstr()
1825 if (NewOpc == AArch64::INSTRUCTION_LIST_END) in substituteCmpToZero()
1837 MI->addRegisterDefined(AArch64::NZCV, &TRI); in substituteCmpToZero()
1854 if (MIOpc == AArch64::CSINCWr) { in canCmpInstrBeRemoved()
1855 if (MI.getOperand(1).getReg() != AArch64::WZR || in canCmpInstrBeRemoved()
1856 MI.getOperand(2).getReg() != AArch64::WZR) in canCmpInstrBeRemoved()
1858 } else if (MIOpc == AArch64::CSINCXr) { in canCmpInstrBeRemoved()
1859 if (MI.getOperand(1).getReg() != AArch64::XZR || in canCmpInstrBeRemoved()
1860 MI.getOperand(2).getReg() != AArch64::XZR) in canCmpInstrBeRemoved()
1870 if (MI.findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, true) != -1) in canCmpInstrBeRemoved()
1969 MI.getOpcode() != AArch64::CATCHRET) in expandPostRAPseudo()
1977 if (MI.getOpcode() == AArch64::CATCHRET) { in expandPostRAPseudo()
1989 BuildMI(MBB, FirstEpilogSEH, DL, TII->get(AArch64::ADRP)) in expandPostRAPseudo()
1990 .addReg(AArch64::X0, RegState::Define) in expandPostRAPseudo()
1992 BuildMI(MBB, FirstEpilogSEH, DL, TII->get(AArch64::ADDXri)) in expandPostRAPseudo()
1993 .addReg(AArch64::X0, RegState::Define) in expandPostRAPseudo()
1994 .addReg(AArch64::X0) in expandPostRAPseudo()
2009 BuildMI(MBB, MI, DL, get(AArch64::MRS)) in expandPostRAPseudo()
2015 BuildMI(MBB, MI, DL, get(AArch64::LDRXui)) in expandPostRAPseudo()
2021 BuildMI(MBB, MI, DL, get(AArch64::LDURXi)) in expandPostRAPseudo()
2028 BuildMI(MBB, MI, DL, get(AArch64::ADDXri)) in expandPostRAPseudo()
2035 BuildMI(MBB, MI, DL, get(AArch64::SUBXri)) in expandPostRAPseudo()
2042 BuildMI(MBB, MI, DL, get(AArch64::LDRXui)) in expandPostRAPseudo()
2049 // It might be nice to use AArch64::MOVi32imm here, which would get in expandPostRAPseudo()
2055 // to insert a AArch64::MOVi32imm before register allocation so that we in expandPostRAPseudo()
2070 BuildMI(MBB, MI, DL, get(AArch64::LOADgot), Reg) in expandPostRAPseudo()
2073 unsigned Reg32 = TRI->getSubReg(Reg, AArch64::sub_32); in expandPostRAPseudo()
2074 BuildMI(MBB, MI, DL, get(AArch64::LDRWui)) in expandPostRAPseudo()
2081 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg) in expandPostRAPseudo()
2088 BuildMI(MBB, MI, DL, get(AArch64::MOVZXi), Reg) in expandPostRAPseudo()
2091 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg) in expandPostRAPseudo()
2095 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg) in expandPostRAPseudo()
2099 BuildMI(MBB, MI, DL, get(AArch64::MOVKXi), Reg) in expandPostRAPseudo()
2103 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg) in expandPostRAPseudo()
2108 BuildMI(MBB, MI, DL, get(AArch64::ADR), Reg) in expandPostRAPseudo()
2111 BuildMI(MBB, MI, DL, get(AArch64::ADRP), Reg) in expandPostRAPseudo()
2115 unsigned Reg32 = TRI->getSubReg(Reg, AArch64::sub_32); in expandPostRAPseudo()
2116 BuildMI(MBB, MI, DL, get(AArch64::LDRWui)) in expandPostRAPseudo()
2123 BuildMI(MBB, MI, DL, get(AArch64::LDRXui), Reg) in expandPostRAPseudo()
2141 case AArch64::MOVZWi: in isGPRZero()
2142 case AArch64::MOVZXi: // movz Rd, #0 (LSL #0) in isGPRZero()
2149 case AArch64::ANDWri: // and Rd, Rzr, #imm in isGPRZero()
2150 return MI.getOperand(1).getReg() == AArch64::WZR; in isGPRZero()
2151 case AArch64::ANDXri: in isGPRZero()
2152 return MI.getOperand(1).getReg() == AArch64::XZR; in isGPRZero()
2154 return MI.getOperand(1).getReg() == AArch64::WZR; in isGPRZero()
2168 return (AArch64::GPR32RegClass.contains(DstReg) || in isGPRCopy()
2169 AArch64::GPR64RegClass.contains(DstReg)); in isGPRCopy()
2171 case AArch64::ORRXrs: // orr Xd, Xzr, Xm (LSL #0) in isGPRCopy()
2172 if (MI.getOperand(1).getReg() == AArch64::XZR) { in isGPRCopy()
2178 case AArch64::ADDXri: // add Xd, Xn, #0 (LSL #0) in isGPRCopy()
2197 return AArch64::FPR128RegClass.contains(DstReg); in isFPRCopy()
2199 case AArch64::ORRv16i8: in isFPRCopy()
2215 case AArch64::LDRWui: in isLoadFromStackSlot()
2216 case AArch64::LDRXui: in isLoadFromStackSlot()
2217 case AArch64::LDRBui: in isLoadFromStackSlot()
2218 case AArch64::LDRHui: in isLoadFromStackSlot()
2219 case AArch64::LDRSui: in isLoadFromStackSlot()
2220 case AArch64::LDRDui: in isLoadFromStackSlot()
2221 case AArch64::LDRQui: in isLoadFromStackSlot()
2222 case AArch64::LDR_PXI: in isLoadFromStackSlot()
2239 case AArch64::STRWui: in isStoreToStackSlot()
2240 case AArch64::STRXui: in isStoreToStackSlot()
2241 case AArch64::STRBui: in isStoreToStackSlot()
2242 case AArch64::STRHui: in isStoreToStackSlot()
2243 case AArch64::STRSui: in isStoreToStackSlot()
2244 case AArch64::STRDui: in isStoreToStackSlot()
2245 case AArch64::STRQui: in isStoreToStackSlot()
2246 case AArch64::STR_PXI: in isStoreToStackSlot()
2282 case AArch64::STURSi: in hasUnscaledLdStOffset()
2283 case AArch64::STRSpre: in hasUnscaledLdStOffset()
2284 case AArch64::STURDi: in hasUnscaledLdStOffset()
2285 case AArch64::STRDpre: in hasUnscaledLdStOffset()
2286 case AArch64::STURQi: in hasUnscaledLdStOffset()
2287 case AArch64::STRQpre: in hasUnscaledLdStOffset()
2288 case AArch64::STURBBi: in hasUnscaledLdStOffset()
2289 case AArch64::STURHHi: in hasUnscaledLdStOffset()
2290 case AArch64::STURWi: in hasUnscaledLdStOffset()
2291 case AArch64::STRWpre: in hasUnscaledLdStOffset()
2292 case AArch64::STURXi: in hasUnscaledLdStOffset()
2293 case AArch64::STRXpre: in hasUnscaledLdStOffset()
2294 case AArch64::LDURSi: in hasUnscaledLdStOffset()
2295 case AArch64::LDRSpre: in hasUnscaledLdStOffset()
2296 case AArch64::LDURDi: in hasUnscaledLdStOffset()
2297 case AArch64::LDRDpre: in hasUnscaledLdStOffset()
2298 case AArch64::LDURQi: in hasUnscaledLdStOffset()
2299 case AArch64::LDRQpre: in hasUnscaledLdStOffset()
2300 case AArch64::LDURWi: in hasUnscaledLdStOffset()
2301 case AArch64::LDRWpre: in hasUnscaledLdStOffset()
2302 case AArch64::LDURXi: in hasUnscaledLdStOffset()
2303 case AArch64::LDRXpre: in hasUnscaledLdStOffset()
2304 case AArch64::LDRSWpre: in hasUnscaledLdStOffset()
2305 case AArch64::LDURSWi: in hasUnscaledLdStOffset()
2306 case AArch64::LDURHHi: in hasUnscaledLdStOffset()
2307 case AArch64::LDURBBi: in hasUnscaledLdStOffset()
2308 case AArch64::LDURSBWi: in hasUnscaledLdStOffset()
2309 case AArch64::LDURSHWi: in hasUnscaledLdStOffset()
2317 case AArch64::PRFMui: return AArch64::PRFUMi; in getUnscaledLdSt()
2318 case AArch64::LDRXui: return AArch64::LDURXi; in getUnscaledLdSt()
2319 case AArch64::LDRWui: return AArch64::LDURWi; in getUnscaledLdSt()
2320 case AArch64::LDRBui: return AArch64::LDURBi; in getUnscaledLdSt()
2321 case AArch64::LDRHui: return AArch64::LDURHi; in getUnscaledLdSt()
2322 case AArch64::LDRSui: return AArch64::LDURSi; in getUnscaledLdSt()
2323 case AArch64::LDRDui: return AArch64::LDURDi; in getUnscaledLdSt()
2324 case AArch64::LDRQui: return AArch64::LDURQi; in getUnscaledLdSt()
2325 case AArch64::LDRBBui: return AArch64::LDURBBi; in getUnscaledLdSt()
2326 case AArch64::LDRHHui: return AArch64::LDURHHi; in getUnscaledLdSt()
2327 case AArch64::LDRSBXui: return AArch64::LDURSBXi; in getUnscaledLdSt()
2328 case AArch64::LDRSBWui: return AArch64::LDURSBWi; in getUnscaledLdSt()
2329 case AArch64::LDRSHXui: return AArch64::LDURSHXi; in getUnscaledLdSt()
2330 case AArch64::LDRSHWui: return AArch64::LDURSHWi; in getUnscaledLdSt()
2331 case AArch64::LDRSWui: return AArch64::LDURSWi; in getUnscaledLdSt()
2332 case AArch64::STRXui: return AArch64::STURXi; in getUnscaledLdSt()
2333 case AArch64::STRWui: return AArch64::STURWi; in getUnscaledLdSt()
2334 case AArch64::STRBui: return AArch64::STURBi; in getUnscaledLdSt()
2335 case AArch64::STRHui: return AArch64::STURHi; in getUnscaledLdSt()
2336 case AArch64::STRSui: return AArch64::STURSi; in getUnscaledLdSt()
2337 case AArch64::STRDui: return AArch64::STURDi; in getUnscaledLdSt()
2338 case AArch64::STRQui: return AArch64::STURQi; in getUnscaledLdSt()
2339 case AArch64::STRBBui: return AArch64::STURBBi; in getUnscaledLdSt()
2340 case AArch64::STRHHui: return AArch64::STURHHi; in getUnscaledLdSt()
2348 case AArch64::LDPXi: in getLoadStoreImmIdx()
2349 case AArch64::LDPDi: in getLoadStoreImmIdx()
2350 case AArch64::STPXi: in getLoadStoreImmIdx()
2351 case AArch64::STPDi: in getLoadStoreImmIdx()
2352 case AArch64::LDNPXi: in getLoadStoreImmIdx()
2353 case AArch64::LDNPDi: in getLoadStoreImmIdx()
2354 case AArch64::STNPXi: in getLoadStoreImmIdx()
2355 case AArch64::STNPDi: in getLoadStoreImmIdx()
2356 case AArch64::LDPQi: in getLoadStoreImmIdx()
2357 case AArch64::STPQi: in getLoadStoreImmIdx()
2358 case AArch64::LDNPQi: in getLoadStoreImmIdx()
2359 case AArch64::STNPQi: in getLoadStoreImmIdx()
2360 case AArch64::LDPWi: in getLoadStoreImmIdx()
2361 case AArch64::LDPSi: in getLoadStoreImmIdx()
2362 case AArch64::STPWi: in getLoadStoreImmIdx()
2363 case AArch64::STPSi: in getLoadStoreImmIdx()
2364 case AArch64::LDNPWi: in getLoadStoreImmIdx()
2365 case AArch64::LDNPSi: in getLoadStoreImmIdx()
2366 case AArch64::STNPWi: in getLoadStoreImmIdx()
2367 case AArch64::STNPSi: in getLoadStoreImmIdx()
2368 case AArch64::LDG: in getLoadStoreImmIdx()
2369 case AArch64::STGPi: in getLoadStoreImmIdx()
2371 case AArch64::LD1B_IMM: in getLoadStoreImmIdx()
2372 case AArch64::LD1B_H_IMM: in getLoadStoreImmIdx()
2373 case AArch64::LD1B_S_IMM: in getLoadStoreImmIdx()
2374 case AArch64::LD1B_D_IMM: in getLoadStoreImmIdx()
2375 case AArch64::LD1SB_H_IMM: in getLoadStoreImmIdx()
2376 case AArch64::LD1SB_S_IMM: in getLoadStoreImmIdx()
2377 case AArch64::LD1SB_D_IMM: in getLoadStoreImmIdx()
2378 case AArch64::LD1H_IMM: in getLoadStoreImmIdx()
2379 case AArch64::LD1H_S_IMM: in getLoadStoreImmIdx()
2380 case AArch64::LD1H_D_IMM: in getLoadStoreImmIdx()
2381 case AArch64::LD1SH_S_IMM: in getLoadStoreImmIdx()
2382 case AArch64::LD1SH_D_IMM: in getLoadStoreImmIdx()
2383 case AArch64::LD1W_IMM: in getLoadStoreImmIdx()
2384 case AArch64::LD1W_D_IMM: in getLoadStoreImmIdx()
2385 case AArch64::LD1SW_D_IMM: in getLoadStoreImmIdx()
2386 case AArch64::LD1D_IMM: in getLoadStoreImmIdx()
2388 case AArch64::LD2B_IMM: in getLoadStoreImmIdx()
2389 case AArch64::LD2H_IMM: in getLoadStoreImmIdx()
2390 case AArch64::LD2W_IMM: in getLoadStoreImmIdx()
2391 case AArch64::LD2D_IMM: in getLoadStoreImmIdx()
2392 case AArch64::LD3B_IMM: in getLoadStoreImmIdx()
2393 case AArch64::LD3H_IMM: in getLoadStoreImmIdx()
2394 case AArch64::LD3W_IMM: in getLoadStoreImmIdx()
2395 case AArch64::LD3D_IMM: in getLoadStoreImmIdx()
2396 case AArch64::LD4B_IMM: in getLoadStoreImmIdx()
2397 case AArch64::LD4H_IMM: in getLoadStoreImmIdx()
2398 case AArch64::LD4W_IMM: in getLoadStoreImmIdx()
2399 case AArch64::LD4D_IMM: in getLoadStoreImmIdx()
2401 case AArch64::ST1B_IMM: in getLoadStoreImmIdx()
2402 case AArch64::ST1B_H_IMM: in getLoadStoreImmIdx()
2403 case AArch64::ST1B_S_IMM: in getLoadStoreImmIdx()
2404 case AArch64::ST1B_D_IMM: in getLoadStoreImmIdx()
2405 case AArch64::ST1H_IMM: in getLoadStoreImmIdx()
2406 case AArch64::ST1H_S_IMM: in getLoadStoreImmIdx()
2407 case AArch64::ST1H_D_IMM: in getLoadStoreImmIdx()
2408 case AArch64::ST1W_IMM: in getLoadStoreImmIdx()
2409 case AArch64::ST1W_D_IMM: in getLoadStoreImmIdx()
2410 case AArch64::ST1D_IMM: in getLoadStoreImmIdx()
2412 case AArch64::ST2B_IMM: in getLoadStoreImmIdx()
2413 case AArch64::ST2H_IMM: in getLoadStoreImmIdx()
2414 case AArch64::ST2W_IMM: in getLoadStoreImmIdx()
2415 case AArch64::ST2D_IMM: in getLoadStoreImmIdx()
2416 case AArch64::ST3B_IMM: in getLoadStoreImmIdx()
2417 case AArch64::ST3H_IMM: in getLoadStoreImmIdx()
2418 case AArch64::ST3W_IMM: in getLoadStoreImmIdx()
2419 case AArch64::ST3D_IMM: in getLoadStoreImmIdx()
2420 case AArch64::ST4B_IMM: in getLoadStoreImmIdx()
2421 case AArch64::ST4H_IMM: in getLoadStoreImmIdx()
2422 case AArch64::ST4W_IMM: in getLoadStoreImmIdx()
2423 case AArch64::ST4D_IMM: in getLoadStoreImmIdx()
2425 case AArch64::LD1RB_IMM: in getLoadStoreImmIdx()
2426 case AArch64::LD1RB_H_IMM: in getLoadStoreImmIdx()
2427 case AArch64::LD1RB_S_IMM: in getLoadStoreImmIdx()
2428 case AArch64::LD1RB_D_IMM: in getLoadStoreImmIdx()
2429 case AArch64::LD1RSB_H_IMM: in getLoadStoreImmIdx()
2430 case AArch64::LD1RSB_S_IMM: in getLoadStoreImmIdx()
2431 case AArch64::LD1RSB_D_IMM: in getLoadStoreImmIdx()
2432 case AArch64::LD1RH_IMM: in getLoadStoreImmIdx()
2433 case AArch64::LD1RH_S_IMM: in getLoadStoreImmIdx()
2434 case AArch64::LD1RH_D_IMM: in getLoadStoreImmIdx()
2435 case AArch64::LD1RSH_S_IMM: in getLoadStoreImmIdx()
2436 case AArch64::LD1RSH_D_IMM: in getLoadStoreImmIdx()
2437 case AArch64::LD1RW_IMM: in getLoadStoreImmIdx()
2438 case AArch64::LD1RW_D_IMM: in getLoadStoreImmIdx()
2439 case AArch64::LD1RSW_IMM: in getLoadStoreImmIdx()
2440 case AArch64::LD1RD_IMM: in getLoadStoreImmIdx()
2442 case AArch64::LDNT1B_ZRI: in getLoadStoreImmIdx()
2443 case AArch64::LDNT1H_ZRI: in getLoadStoreImmIdx()
2444 case AArch64::LDNT1W_ZRI: in getLoadStoreImmIdx()
2445 case AArch64::LDNT1D_ZRI: in getLoadStoreImmIdx()
2446 case AArch64::STNT1B_ZRI: in getLoadStoreImmIdx()
2447 case AArch64::STNT1H_ZRI: in getLoadStoreImmIdx()
2448 case AArch64::STNT1W_ZRI: in getLoadStoreImmIdx()
2449 case AArch64::STNT1D_ZRI: in getLoadStoreImmIdx()
2451 case AArch64::LDNF1B_IMM: in getLoadStoreImmIdx()
2452 case AArch64::LDNF1B_H_IMM: in getLoadStoreImmIdx()
2453 case AArch64::LDNF1B_S_IMM: in getLoadStoreImmIdx()
2454 case AArch64::LDNF1B_D_IMM: in getLoadStoreImmIdx()
2455 case AArch64::LDNF1SB_H_IMM: in getLoadStoreImmIdx()
2456 case AArch64::LDNF1SB_S_IMM: in getLoadStoreImmIdx()
2457 case AArch64::LDNF1SB_D_IMM: in getLoadStoreImmIdx()
2458 case AArch64::LDNF1H_IMM: in getLoadStoreImmIdx()
2459 case AArch64::LDNF1H_S_IMM: in getLoadStoreImmIdx()
2460 case AArch64::LDNF1H_D_IMM: in getLoadStoreImmIdx()
2461 case AArch64::LDNF1SH_S_IMM: in getLoadStoreImmIdx()
2462 case AArch64::LDNF1SH_D_IMM: in getLoadStoreImmIdx()
2463 case AArch64::LDNF1W_IMM: in getLoadStoreImmIdx()
2464 case AArch64::LDNF1W_D_IMM: in getLoadStoreImmIdx()
2465 case AArch64::LDNF1SW_D_IMM: in getLoadStoreImmIdx()
2466 case AArch64::LDNF1D_IMM: in getLoadStoreImmIdx()
2468 case AArch64::ADDG: in getLoadStoreImmIdx()
2469 case AArch64::STGi: in getLoadStoreImmIdx()
2470 case AArch64::LDR_PXI: in getLoadStoreImmIdx()
2471 case AArch64::STR_PXI: in getLoadStoreImmIdx()
2481 case AArch64::STRSui: in isPairableLdStInst()
2482 case AArch64::STRDui: in isPairableLdStInst()
2483 case AArch64::STRQui: in isPairableLdStInst()
2484 case AArch64::STRXui: in isPairableLdStInst()
2485 case AArch64::STRWui: in isPairableLdStInst()
2486 case AArch64::LDRSui: in isPairableLdStInst()
2487 case AArch64::LDRDui: in isPairableLdStInst()
2488 case AArch64::LDRQui: in isPairableLdStInst()
2489 case AArch64::LDRXui: in isPairableLdStInst()
2490 case AArch64::LDRWui: in isPairableLdStInst()
2491 case AArch64::LDRSWui: in isPairableLdStInst()
2493 case AArch64::STURSi: in isPairableLdStInst()
2494 case AArch64::STRSpre: in isPairableLdStInst()
2495 case AArch64::STURDi: in isPairableLdStInst()
2496 case AArch64::STRDpre: in isPairableLdStInst()
2497 case AArch64::STURQi: in isPairableLdStInst()
2498 case AArch64::STRQpre: in isPairableLdStInst()
2499 case AArch64::STURWi: in isPairableLdStInst()
2500 case AArch64::STRWpre: in isPairableLdStInst()
2501 case AArch64::STURXi: in isPairableLdStInst()
2502 case AArch64::STRXpre: in isPairableLdStInst()
2503 case AArch64::LDURSi: in isPairableLdStInst()
2504 case AArch64::LDRSpre: in isPairableLdStInst()
2505 case AArch64::LDURDi: in isPairableLdStInst()
2506 case AArch64::LDRDpre: in isPairableLdStInst()
2507 case AArch64::LDURQi: in isPairableLdStInst()
2508 case AArch64::LDRQpre: in isPairableLdStInst()
2509 case AArch64::LDURWi: in isPairableLdStInst()
2510 case AArch64::LDRWpre: in isPairableLdStInst()
2511 case AArch64::LDURXi: in isPairableLdStInst()
2512 case AArch64::LDRXpre: in isPairableLdStInst()
2513 case AArch64::LDURSWi: in isPairableLdStInst()
2514 case AArch64::LDRSWpre: in isPairableLdStInst()
2525 case AArch64::TCRETURNdi: in isTailCallReturnInst()
2526 case AArch64::TCRETURNri: in isTailCallReturnInst()
2527 case AArch64::TCRETURNrix16x17: in isTailCallReturnInst()
2528 case AArch64::TCRETURNrix17: in isTailCallReturnInst()
2529 case AArch64::TCRETURNrinotx16: in isTailCallReturnInst()
2530 case AArch64::TCRETURNriALL: in isTailCallReturnInst()
2531 case AArch64::AUTH_TCRETURN: in isTailCallReturnInst()
2532 case AArch64::AUTH_TCRETURN_BTI: in isTailCallReturnInst()
2542 case AArch64::ADDWri: in convertToFlagSettingOpc()
2543 return AArch64::ADDSWri; in convertToFlagSettingOpc()
2544 case AArch64::ADDWrr: in convertToFlagSettingOpc()
2545 return AArch64::ADDSWrr; in convertToFlagSettingOpc()
2546 case AArch64::ADDWrs: in convertToFlagSettingOpc()
2547 return AArch64::ADDSWrs; in convertToFlagSettingOpc()
2548 case AArch64::ADDWrx: in convertToFlagSettingOpc()
2549 return AArch64::ADDSWrx; in convertToFlagSettingOpc()
2550 case AArch64::ANDWri: in convertToFlagSettingOpc()
2551 return AArch64::ANDSWri; in convertToFlagSettingOpc()
2552 case AArch64::ANDWrr: in convertToFlagSettingOpc()
2553 return AArch64::ANDSWrr; in convertToFlagSettingOpc()
2554 case AArch64::ANDWrs: in convertToFlagSettingOpc()
2555 return AArch64::ANDSWrs; in convertToFlagSettingOpc()
2556 case AArch64::BICWrr: in convertToFlagSettingOpc()
2557 return AArch64::BICSWrr; in convertToFlagSettingOpc()
2558 case AArch64::BICWrs: in convertToFlagSettingOpc()
2559 return AArch64::BICSWrs; in convertToFlagSettingOpc()
2560 case AArch64::SUBWri: in convertToFlagSettingOpc()
2561 return AArch64::SUBSWri; in convertToFlagSettingOpc()
2562 case AArch64::SUBWrr: in convertToFlagSettingOpc()
2563 return AArch64::SUBSWrr; in convertToFlagSettingOpc()
2564 case AArch64::SUBWrs: in convertToFlagSettingOpc()
2565 return AArch64::SUBSWrs; in convertToFlagSettingOpc()
2566 case AArch64::SUBWrx: in convertToFlagSettingOpc()
2567 return AArch64::SUBSWrx; in convertToFlagSettingOpc()
2569 case AArch64::ADDXri: in convertToFlagSettingOpc()
2570 return AArch64::ADDSXri; in convertToFlagSettingOpc()
2571 case AArch64::ADDXrr: in convertToFlagSettingOpc()
2572 return AArch64::ADDSXrr; in convertToFlagSettingOpc()
2573 case AArch64::ADDXrs: in convertToFlagSettingOpc()
2574 return AArch64::ADDSXrs; in convertToFlagSettingOpc()
2575 case AArch64::ADDXrx: in convertToFlagSettingOpc()
2576 return AArch64::ADDSXrx; in convertToFlagSettingOpc()
2577 case AArch64::ANDXri: in convertToFlagSettingOpc()
2578 return AArch64::ANDSXri; in convertToFlagSettingOpc()
2579 case AArch64::ANDXrr: in convertToFlagSettingOpc()
2580 return AArch64::ANDSXrr; in convertToFlagSettingOpc()
2581 case AArch64::ANDXrs: in convertToFlagSettingOpc()
2582 return AArch64::ANDSXrs; in convertToFlagSettingOpc()
2583 case AArch64::BICXrr: in convertToFlagSettingOpc()
2584 return AArch64::BICSXrr; in convertToFlagSettingOpc()
2585 case AArch64::BICXrs: in convertToFlagSettingOpc()
2586 return AArch64::BICSXrs; in convertToFlagSettingOpc()
2587 case AArch64::SUBXri: in convertToFlagSettingOpc()
2588 return AArch64::SUBSXri; in convertToFlagSettingOpc()
2589 case AArch64::SUBXrr: in convertToFlagSettingOpc()
2590 return AArch64::SUBSXrr; in convertToFlagSettingOpc()
2591 case AArch64::SUBXrs: in convertToFlagSettingOpc()
2592 return AArch64::SUBSXrs; in convertToFlagSettingOpc()
2593 case AArch64::SUBXrx: in convertToFlagSettingOpc()
2594 return AArch64::SUBSXrx; in convertToFlagSettingOpc()
2596 case AArch64::AND_PPzPP: in convertToFlagSettingOpc()
2597 return AArch64::ANDS_PPzPP; in convertToFlagSettingOpc()
2598 case AArch64::BIC_PPzPP: in convertToFlagSettingOpc()
2599 return AArch64::BICS_PPzPP; in convertToFlagSettingOpc()
2600 case AArch64::EOR_PPzPP: in convertToFlagSettingOpc()
2601 return AArch64::EORS_PPzPP; in convertToFlagSettingOpc()
2602 case AArch64::NAND_PPzPP: in convertToFlagSettingOpc()
2603 return AArch64::NANDS_PPzPP; in convertToFlagSettingOpc()
2604 case AArch64::NOR_PPzPP: in convertToFlagSettingOpc()
2605 return AArch64::NORS_PPzPP; in convertToFlagSettingOpc()
2606 case AArch64::ORN_PPzPP: in convertToFlagSettingOpc()
2607 return AArch64::ORNS_PPzPP; in convertToFlagSettingOpc()
2608 case AArch64::ORR_PPzPP: in convertToFlagSettingOpc()
2609 return AArch64::ORRS_PPzPP; in convertToFlagSettingOpc()
2610 case AArch64::BRKA_PPzP: in convertToFlagSettingOpc()
2611 return AArch64::BRKAS_PPzP; in convertToFlagSettingOpc()
2612 case AArch64::BRKPA_PPzPP: in convertToFlagSettingOpc()
2613 return AArch64::BRKPAS_PPzPP; in convertToFlagSettingOpc()
2614 case AArch64::BRKB_PPzP: in convertToFlagSettingOpc()
2615 return AArch64::BRKBS_PPzP; in convertToFlagSettingOpc()
2616 case AArch64::BRKPB_PPzPP: in convertToFlagSettingOpc()
2617 return AArch64::BRKPBS_PPzPP; in convertToFlagSettingOpc()
2618 case AArch64::BRKN_PPzP: in convertToFlagSettingOpc()
2619 return AArch64::BRKNS_PPzP; in convertToFlagSettingOpc()
2620 case AArch64::RDFFR_PPz: in convertToFlagSettingOpc()
2621 return AArch64::RDFFRS_PPz; in convertToFlagSettingOpc()
2622 case AArch64::PTRUE_B: in convertToFlagSettingOpc()
2623 return AArch64::PTRUES_B; in convertToFlagSettingOpc()
2688 case AArch64::LDURQi: in isCandidateToMergeOrPair()
2689 case AArch64::STURQi: in isCandidateToMergeOrPair()
2690 case AArch64::LDRQui: in isCandidateToMergeOrPair()
2691 case AArch64::STRQui: in isCandidateToMergeOrPair()
2711 // The maximum vscale is 16 under AArch64, return the maximal extent for the in getMemOperandsWithOffsetWidth()
2748 case AArch64::LDURQi: in canFoldIntoAddrMode()
2749 case AArch64::STURQi: in canFoldIntoAddrMode()
2753 case AArch64::LDURDi: in canFoldIntoAddrMode()
2754 case AArch64::STURDi: in canFoldIntoAddrMode()
2755 case AArch64::LDURXi: in canFoldIntoAddrMode()
2756 case AArch64::STURXi: in canFoldIntoAddrMode()
2760 case AArch64::LDURWi: in canFoldIntoAddrMode()
2761 case AArch64::LDURSWi: in canFoldIntoAddrMode()
2762 case AArch64::STURWi: in canFoldIntoAddrMode()
2766 case AArch64::LDURHi: in canFoldIntoAddrMode()
2767 case AArch64::STURHi: in canFoldIntoAddrMode()
2768 case AArch64::LDURHHi: in canFoldIntoAddrMode()
2769 case AArch64::STURHHi: in canFoldIntoAddrMode()
2770 case AArch64::LDURSHXi: in canFoldIntoAddrMode()
2771 case AArch64::LDURSHWi: in canFoldIntoAddrMode()
2775 case AArch64::LDRBroX: in canFoldIntoAddrMode()
2776 case AArch64::LDRBBroX: in canFoldIntoAddrMode()
2777 case AArch64::LDRSBXroX: in canFoldIntoAddrMode()
2778 case AArch64::LDRSBWroX: in canFoldIntoAddrMode()
2779 case AArch64::STRBroX: in canFoldIntoAddrMode()
2780 case AArch64::STRBBroX: in canFoldIntoAddrMode()
2781 case AArch64::LDURBi: in canFoldIntoAddrMode()
2782 case AArch64::LDURBBi: in canFoldIntoAddrMode()
2783 case AArch64::LDURSBXi: in canFoldIntoAddrMode()
2784 case AArch64::LDURSBWi: in canFoldIntoAddrMode()
2785 case AArch64::STURBi: in canFoldIntoAddrMode()
2786 case AArch64::STURBBi: in canFoldIntoAddrMode()
2787 case AArch64::LDRBui: in canFoldIntoAddrMode()
2788 case AArch64::LDRBBui: in canFoldIntoAddrMode()
2789 case AArch64::LDRSBXui: in canFoldIntoAddrMode()
2790 case AArch64::LDRSBWui: in canFoldIntoAddrMode()
2791 case AArch64::STRBui: in canFoldIntoAddrMode()
2792 case AArch64::STRBBui: in canFoldIntoAddrMode()
2796 case AArch64::LDRQroX: in canFoldIntoAddrMode()
2797 case AArch64::STRQroX: in canFoldIntoAddrMode()
2798 case AArch64::LDRQui: in canFoldIntoAddrMode()
2799 case AArch64::STRQui: in canFoldIntoAddrMode()
2804 case AArch64::LDRDroX: in canFoldIntoAddrMode()
2805 case AArch64::STRDroX: in canFoldIntoAddrMode()
2806 case AArch64::LDRXroX: in canFoldIntoAddrMode()
2807 case AArch64::STRXroX: in canFoldIntoAddrMode()
2808 case AArch64::LDRDui: in canFoldIntoAddrMode()
2809 case AArch64::STRDui: in canFoldIntoAddrMode()
2810 case AArch64::LDRXui: in canFoldIntoAddrMode()
2811 case AArch64::STRXui: in canFoldIntoAddrMode()
2816 case AArch64::LDRWroX: in canFoldIntoAddrMode()
2817 case AArch64::LDRSWroX: in canFoldIntoAddrMode()
2818 case AArch64::STRWroX: in canFoldIntoAddrMode()
2819 case AArch64::LDRWui: in canFoldIntoAddrMode()
2820 case AArch64::LDRSWui: in canFoldIntoAddrMode()
2821 case AArch64::STRWui: in canFoldIntoAddrMode()
2826 case AArch64::LDRHroX: in canFoldIntoAddrMode()
2827 case AArch64::STRHroX: in canFoldIntoAddrMode()
2828 case AArch64::LDRHHroX: in canFoldIntoAddrMode()
2829 case AArch64::STRHHroX: in canFoldIntoAddrMode()
2830 case AArch64::LDRSHXroX: in canFoldIntoAddrMode()
2831 case AArch64::LDRSHWroX: in canFoldIntoAddrMode()
2832 case AArch64::LDRHui: in canFoldIntoAddrMode()
2833 case AArch64::STRHui: in canFoldIntoAddrMode()
2834 case AArch64::LDRHHui: in canFoldIntoAddrMode()
2835 case AArch64::STRHHui: in canFoldIntoAddrMode()
2836 case AArch64::LDRSHXui: in canFoldIntoAddrMode()
2837 case AArch64::LDRSHWui: in canFoldIntoAddrMode()
2869 case AArch64::SBFMXri: in canFoldIntoAddrMode()
2895 AddrI.getOperand(3).getImm() != AArch64::sub_32) in canFoldIntoAddrMode()
2904 if (DefMI.getOpcode() != AArch64::ORRWrs || in canFoldIntoAddrMode()
2905 DefMI.getOperand(1).getReg() != AArch64::WZR || in canFoldIntoAddrMode()
2980 return (Opcode == AArch64::STURQi || Opcode == AArch64::STRQui) && in canFoldIntoAddrMode()
2990 case AArch64::ADDXri: in canFoldIntoAddrMode()
2998 case AArch64::SUBXri: in canFoldIntoAddrMode()
3006 case AArch64::ADDXrs: { in canFoldIntoAddrMode()
3027 case AArch64::ADDXrr: in canFoldIntoAddrMode()
3039 case AArch64::ADDXrx: in canFoldIntoAddrMode()
3071 case AArch64::LDURQi: in regOffsetOpcode()
3072 case AArch64::LDRQui: in regOffsetOpcode()
3073 return AArch64::LDRQroX; in regOffsetOpcode()
3074 case AArch64::STURQi: in regOffsetOpcode()
3075 case AArch64::STRQui: in regOffsetOpcode()
3076 return AArch64::STRQroX; in regOffsetOpcode()
3077 case AArch64::LDURDi: in regOffsetOpcode()
3078 case AArch64::LDRDui: in regOffsetOpcode()
3079 return AArch64::LDRDroX; in regOffsetOpcode()
3080 case AArch64::STURDi: in regOffsetOpcode()
3081 case AArch64::STRDui: in regOffsetOpcode()
3082 return AArch64::STRDroX; in regOffsetOpcode()
3083 case AArch64::LDURXi: in regOffsetOpcode()
3084 case AArch64::LDRXui: in regOffsetOpcode()
3085 return AArch64::LDRXroX; in regOffsetOpcode()
3086 case AArch64::STURXi: in regOffsetOpcode()
3087 case AArch64::STRXui: in regOffsetOpcode()
3088 return AArch64::STRXroX; in regOffsetOpcode()
3089 case AArch64::LDURWi: in regOffsetOpcode()
3090 case AArch64::LDRWui: in regOffsetOpcode()
3091 return AArch64::LDRWroX; in regOffsetOpcode()
3092 case AArch64::LDURSWi: in regOffsetOpcode()
3093 case AArch64::LDRSWui: in regOffsetOpcode()
3094 return AArch64::LDRSWroX; in regOffsetOpcode()
3095 case AArch64::STURWi: in regOffsetOpcode()
3096 case AArch64::STRWui: in regOffsetOpcode()
3097 return AArch64::STRWroX; in regOffsetOpcode()
3098 case AArch64::LDURHi: in regOffsetOpcode()
3099 case AArch64::LDRHui: in regOffsetOpcode()
3100 return AArch64::LDRHroX; in regOffsetOpcode()
3101 case AArch64::STURHi: in regOffsetOpcode()
3102 case AArch64::STRHui: in regOffsetOpcode()
3103 return AArch64::STRHroX; in regOffsetOpcode()
3104 case AArch64::LDURHHi: in regOffsetOpcode()
3105 case AArch64::LDRHHui: in regOffsetOpcode()
3106 return AArch64::LDRHHroX; in regOffsetOpcode()
3107 case AArch64::STURHHi: in regOffsetOpcode()
3108 case AArch64::STRHHui: in regOffsetOpcode()
3109 return AArch64::STRHHroX; in regOffsetOpcode()
3110 case AArch64::LDURSHXi: in regOffsetOpcode()
3111 case AArch64::LDRSHXui: in regOffsetOpcode()
3112 return AArch64::LDRSHXroX; in regOffsetOpcode()
3113 case AArch64::LDURSHWi: in regOffsetOpcode()
3114 case AArch64::LDRSHWui: in regOffsetOpcode()
3115 return AArch64::LDRSHWroX; in regOffsetOpcode()
3116 case AArch64::LDURBi: in regOffsetOpcode()
3117 case AArch64::LDRBui: in regOffsetOpcode()
3118 return AArch64::LDRBroX; in regOffsetOpcode()
3119 case AArch64::LDURBBi: in regOffsetOpcode()
3120 case AArch64::LDRBBui: in regOffsetOpcode()
3121 return AArch64::LDRBBroX; in regOffsetOpcode()
3122 case AArch64::LDURSBXi: in regOffsetOpcode()
3123 case AArch64::LDRSBXui: in regOffsetOpcode()
3124 return AArch64::LDRSBXroX; in regOffsetOpcode()
3125 case AArch64::LDURSBWi: in regOffsetOpcode()
3126 case AArch64::LDRSBWui: in regOffsetOpcode()
3127 return AArch64::LDRSBWroX; in regOffsetOpcode()
3128 case AArch64::STURBi: in regOffsetOpcode()
3129 case AArch64::STRBui: in regOffsetOpcode()
3130 return AArch64::STRBroX; in regOffsetOpcode()
3131 case AArch64::STURBBi: in regOffsetOpcode()
3132 case AArch64::STRBBui: in regOffsetOpcode()
3133 return AArch64::STRBBroX; in regOffsetOpcode()
3145 case AArch64::LDURQi: in scaledOffsetOpcode()
3147 return AArch64::LDRQui; in scaledOffsetOpcode()
3148 case AArch64::STURQi: in scaledOffsetOpcode()
3150 return AArch64::STRQui; in scaledOffsetOpcode()
3151 case AArch64::LDURDi: in scaledOffsetOpcode()
3153 return AArch64::LDRDui; in scaledOffsetOpcode()
3154 case AArch64::STURDi: in scaledOffsetOpcode()
3156 return AArch64::STRDui; in scaledOffsetOpcode()
3157 case AArch64::LDURXi: in scaledOffsetOpcode()
3159 return AArch64::LDRXui; in scaledOffsetOpcode()
3160 case AArch64::STURXi: in scaledOffsetOpcode()
3162 return AArch64::STRXui; in scaledOffsetOpcode()
3163 case AArch64::LDURWi: in scaledOffsetOpcode()
3165 return AArch64::LDRWui; in scaledOffsetOpcode()
3166 case AArch64::LDURSWi: in scaledOffsetOpcode()
3168 return AArch64::LDRSWui; in scaledOffsetOpcode()
3169 case AArch64::STURWi: in scaledOffsetOpcode()
3171 return AArch64::STRWui; in scaledOffsetOpcode()
3172 case AArch64::LDURHi: in scaledOffsetOpcode()
3174 return AArch64::LDRHui; in scaledOffsetOpcode()
3175 case AArch64::STURHi: in scaledOffsetOpcode()
3177 return AArch64::STRHui; in scaledOffsetOpcode()
3178 case AArch64::LDURHHi: in scaledOffsetOpcode()
3180 return AArch64::LDRHHui; in scaledOffsetOpcode()
3181 case AArch64::STURHHi: in scaledOffsetOpcode()
3183 return AArch64::STRHHui; in scaledOffsetOpcode()
3184 case AArch64::LDURSHXi: in scaledOffsetOpcode()
3186 return AArch64::LDRSHXui; in scaledOffsetOpcode()
3187 case AArch64::LDURSHWi: in scaledOffsetOpcode()
3189 return AArch64::LDRSHWui; in scaledOffsetOpcode()
3190 case AArch64::LDURBi: in scaledOffsetOpcode()
3192 return AArch64::LDRBui; in scaledOffsetOpcode()
3193 case AArch64::LDURBBi: in scaledOffsetOpcode()
3195 return AArch64::LDRBBui; in scaledOffsetOpcode()
3196 case AArch64::LDURSBXi: in scaledOffsetOpcode()
3198 return AArch64::LDRSBXui; in scaledOffsetOpcode()
3199 case AArch64::LDURSBWi: in scaledOffsetOpcode()
3201 return AArch64::LDRSBWui; in scaledOffsetOpcode()
3202 case AArch64::STURBi: in scaledOffsetOpcode()
3204 return AArch64::STRBui; in scaledOffsetOpcode()
3205 case AArch64::STURBBi: in scaledOffsetOpcode()
3207 return AArch64::STRBBui; in scaledOffsetOpcode()
3208 case AArch64::LDRQui: in scaledOffsetOpcode()
3209 case AArch64::STRQui: in scaledOffsetOpcode()
3212 case AArch64::LDRDui: in scaledOffsetOpcode()
3213 case AArch64::STRDui: in scaledOffsetOpcode()
3214 case AArch64::LDRXui: in scaledOffsetOpcode()
3215 case AArch64::STRXui: in scaledOffsetOpcode()
3218 case AArch64::LDRWui: in scaledOffsetOpcode()
3219 case AArch64::LDRSWui: in scaledOffsetOpcode()
3220 case AArch64::STRWui: in scaledOffsetOpcode()
3223 case AArch64::LDRHui: in scaledOffsetOpcode()
3224 case AArch64::STRHui: in scaledOffsetOpcode()
3225 case AArch64::LDRHHui: in scaledOffsetOpcode()
3226 case AArch64::STRHHui: in scaledOffsetOpcode()
3227 case AArch64::LDRSHXui: in scaledOffsetOpcode()
3228 case AArch64::LDRSHWui: in scaledOffsetOpcode()
3231 case AArch64::LDRBui: in scaledOffsetOpcode()
3232 case AArch64::LDRBBui: in scaledOffsetOpcode()
3233 case AArch64::LDRSBXui: in scaledOffsetOpcode()
3234 case AArch64::LDRSBWui: in scaledOffsetOpcode()
3235 case AArch64::STRBui: in scaledOffsetOpcode()
3236 case AArch64::STRBBui: in scaledOffsetOpcode()
3250 case AArch64::LDURQi: in unscaledOffsetOpcode()
3251 case AArch64::STURQi: in unscaledOffsetOpcode()
3252 case AArch64::LDURDi: in unscaledOffsetOpcode()
3253 case AArch64::STURDi: in unscaledOffsetOpcode()
3254 case AArch64::LDURXi: in unscaledOffsetOpcode()
3255 case AArch64::STURXi: in unscaledOffsetOpcode()
3256 case AArch64::LDURWi: in unscaledOffsetOpcode()
3257 case AArch64::LDURSWi: in unscaledOffsetOpcode()
3258 case AArch64::STURWi: in unscaledOffsetOpcode()
3259 case AArch64::LDURHi: in unscaledOffsetOpcode()
3260 case AArch64::STURHi: in unscaledOffsetOpcode()
3261 case AArch64::LDURHHi: in unscaledOffsetOpcode()
3262 case AArch64::STURHHi: in unscaledOffsetOpcode()
3263 case AArch64::LDURSHXi: in unscaledOffsetOpcode()
3264 case AArch64::LDURSHWi: in unscaledOffsetOpcode()
3265 case AArch64::LDURBi: in unscaledOffsetOpcode()
3266 case AArch64::STURBi: in unscaledOffsetOpcode()
3267 case AArch64::LDURBBi: in unscaledOffsetOpcode()
3268 case AArch64::STURBBi: in unscaledOffsetOpcode()
3269 case AArch64::LDURSBWi: in unscaledOffsetOpcode()
3270 case AArch64::LDURSBXi: in unscaledOffsetOpcode()
3272 case AArch64::LDRQui: in unscaledOffsetOpcode()
3273 return AArch64::LDURQi; in unscaledOffsetOpcode()
3274 case AArch64::STRQui: in unscaledOffsetOpcode()
3275 return AArch64::STURQi; in unscaledOffsetOpcode()
3276 case AArch64::LDRDui: in unscaledOffsetOpcode()
3277 return AArch64::LDURDi; in unscaledOffsetOpcode()
3278 case AArch64::STRDui: in unscaledOffsetOpcode()
3279 return AArch64::STURDi; in unscaledOffsetOpcode()
3280 case AArch64::LDRXui: in unscaledOffsetOpcode()
3281 return AArch64::LDURXi; in unscaledOffsetOpcode()
3282 case AArch64::STRXui: in unscaledOffsetOpcode()
3283 return AArch64::STURXi; in unscaledOffsetOpcode()
3284 case AArch64::LDRWui: in unscaledOffsetOpcode()
3285 return AArch64::LDURWi; in unscaledOffsetOpcode()
3286 case AArch64::LDRSWui: in unscaledOffsetOpcode()
3287 return AArch64::LDURSWi; in unscaledOffsetOpcode()
3288 case AArch64::STRWui: in unscaledOffsetOpcode()
3289 return AArch64::STURWi; in unscaledOffsetOpcode()
3290 case AArch64::LDRHui: in unscaledOffsetOpcode()
3291 return AArch64::LDURHi; in unscaledOffsetOpcode()
3292 case AArch64::STRHui: in unscaledOffsetOpcode()
3293 return AArch64::STURHi; in unscaledOffsetOpcode()
3294 case AArch64::LDRHHui: in unscaledOffsetOpcode()
3295 return AArch64::LDURHHi; in unscaledOffsetOpcode()
3296 case AArch64::STRHHui: in unscaledOffsetOpcode()
3297 return AArch64::STURHHi; in unscaledOffsetOpcode()
3298 case AArch64::LDRSHXui: in unscaledOffsetOpcode()
3299 return AArch64::LDURSHXi; in unscaledOffsetOpcode()
3300 case AArch64::LDRSHWui: in unscaledOffsetOpcode()
3301 return AArch64::LDURSHWi; in unscaledOffsetOpcode()
3302 case AArch64::LDRBBui: in unscaledOffsetOpcode()
3303 return AArch64::LDURBBi; in unscaledOffsetOpcode()
3304 case AArch64::LDRBui: in unscaledOffsetOpcode()
3305 return AArch64::LDURBi; in unscaledOffsetOpcode()
3306 case AArch64::STRBBui: in unscaledOffsetOpcode()
3307 return AArch64::STURBBi; in unscaledOffsetOpcode()
3308 case AArch64::STRBui: in unscaledOffsetOpcode()
3309 return AArch64::STURBi; in unscaledOffsetOpcode()
3310 case AArch64::LDRSBWui: in unscaledOffsetOpcode()
3311 return AArch64::LDURSBWi; in unscaledOffsetOpcode()
3312 case AArch64::LDRSBXui: in unscaledOffsetOpcode()
3313 return AArch64::LDURSBXi; in unscaledOffsetOpcode()
3326 case AArch64::LDRQroX: in offsetExtendOpcode()
3327 case AArch64::LDURQi: in offsetExtendOpcode()
3328 case AArch64::LDRQui: in offsetExtendOpcode()
3329 return AArch64::LDRQroW; in offsetExtendOpcode()
3330 case AArch64::STRQroX: in offsetExtendOpcode()
3331 case AArch64::STURQi: in offsetExtendOpcode()
3332 case AArch64::STRQui: in offsetExtendOpcode()
3333 return AArch64::STRQroW; in offsetExtendOpcode()
3334 case AArch64::LDRDroX: in offsetExtendOpcode()
3335 case AArch64::LDURDi: in offsetExtendOpcode()
3336 case AArch64::LDRDui: in offsetExtendOpcode()
3337 return AArch64::LDRDroW; in offsetExtendOpcode()
3338 case AArch64::STRDroX: in offsetExtendOpcode()
3339 case AArch64::STURDi: in offsetExtendOpcode()
3340 case AArch64::STRDui: in offsetExtendOpcode()
3341 return AArch64::STRDroW; in offsetExtendOpcode()
3342 case AArch64::LDRXroX: in offsetExtendOpcode()
3343 case AArch64::LDURXi: in offsetExtendOpcode()
3344 case AArch64::LDRXui: in offsetExtendOpcode()
3345 return AArch64::LDRXroW; in offsetExtendOpcode()
3346 case AArch64::STRXroX: in offsetExtendOpcode()
3347 case AArch64::STURXi: in offsetExtendOpcode()
3348 case AArch64::STRXui: in offsetExtendOpcode()
3349 return AArch64::STRXroW; in offsetExtendOpcode()
3350 case AArch64::LDRWroX: in offsetExtendOpcode()
3351 case AArch64::LDURWi: in offsetExtendOpcode()
3352 case AArch64::LDRWui: in offsetExtendOpcode()
3353 return AArch64::LDRWroW; in offsetExtendOpcode()
3354 case AArch64::LDRSWroX: in offsetExtendOpcode()
3355 case AArch64::LDURSWi: in offsetExtendOpcode()
3356 case AArch64::LDRSWui: in offsetExtendOpcode()
3357 return AArch64::LDRSWroW; in offsetExtendOpcode()
3358 case AArch64::STRWroX: in offsetExtendOpcode()
3359 case AArch64::STURWi: in offsetExtendOpcode()
3360 case AArch64::STRWui: in offsetExtendOpcode()
3361 return AArch64::STRWroW; in offsetExtendOpcode()
3362 case AArch64::LDRHroX: in offsetExtendOpcode()
3363 case AArch64::LDURHi: in offsetExtendOpcode()
3364 case AArch64::LDRHui: in offsetExtendOpcode()
3365 return AArch64::LDRHroW; in offsetExtendOpcode()
3366 case AArch64::STRHroX: in offsetExtendOpcode()
3367 case AArch64::STURHi: in offsetExtendOpcode()
3368 case AArch64::STRHui: in offsetExtendOpcode()
3369 return AArch64::STRHroW; in offsetExtendOpcode()
3370 case AArch64::LDRHHroX: in offsetExtendOpcode()
3371 case AArch64::LDURHHi: in offsetExtendOpcode()
3372 case AArch64::LDRHHui: in offsetExtendOpcode()
3373 return AArch64::LDRHHroW; in offsetExtendOpcode()
3374 case AArch64::STRHHroX: in offsetExtendOpcode()
3375 case AArch64::STURHHi: in offsetExtendOpcode()
3376 case AArch64::STRHHui: in offsetExtendOpcode()
3377 return AArch64::STRHHroW; in offsetExtendOpcode()
3378 case AArch64::LDRSHXroX: in offsetExtendOpcode()
3379 case AArch64::LDURSHXi: in offsetExtendOpcode()
3380 case AArch64::LDRSHXui: in offsetExtendOpcode()
3381 return AArch64::LDRSHXroW; in offsetExtendOpcode()
3382 case AArch64::LDRSHWroX: in offsetExtendOpcode()
3383 case AArch64::LDURSHWi: in offsetExtendOpcode()
3384 case AArch64::LDRSHWui: in offsetExtendOpcode()
3385 return AArch64::LDRSHWroW; in offsetExtendOpcode()
3386 case AArch64::LDRBroX: in offsetExtendOpcode()
3387 case AArch64::LDURBi: in offsetExtendOpcode()
3388 case AArch64::LDRBui: in offsetExtendOpcode()
3389 return AArch64::LDRBroW; in offsetExtendOpcode()
3390 case AArch64::LDRBBroX: in offsetExtendOpcode()
3391 case AArch64::LDURBBi: in offsetExtendOpcode()
3392 case AArch64::LDRBBui: in offsetExtendOpcode()
3393 return AArch64::LDRBBroW; in offsetExtendOpcode()
3394 case AArch64::LDRSBXroX: in offsetExtendOpcode()
3395 case AArch64::LDURSBXi: in offsetExtendOpcode()
3396 case AArch64::LDRSBXui: in offsetExtendOpcode()
3397 return AArch64::LDRSBXroW; in offsetExtendOpcode()
3398 case AArch64::LDRSBWroX: in offsetExtendOpcode()
3399 case AArch64::LDURSBWi: in offsetExtendOpcode()
3400 case AArch64::LDRSBWui: in offsetExtendOpcode()
3401 return AArch64::LDRSBWroW; in offsetExtendOpcode()
3402 case AArch64::STRBroX: in offsetExtendOpcode()
3403 case AArch64::STURBi: in offsetExtendOpcode()
3404 case AArch64::STRBui: in offsetExtendOpcode()
3405 return AArch64::STRBroW; in offsetExtendOpcode()
3406 case AArch64::STRBBroX: in offsetExtendOpcode()
3407 case AArch64::STURBBi: in offsetExtendOpcode()
3408 case AArch64::STRBBui: in offsetExtendOpcode()
3409 return AArch64::STRBBroW; in offsetExtendOpcode()
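// Addressing-mode remapping: the three helpers above pick the equivalent
// opcode for the same memory operation under a different addressing mode.
// For a 64-bit load the three forms are (illustrative assembly):
//   ldr  x0, [x1, #8]           ; LDRXui  - scaled, unsigned 12-bit immediate
//   ldur x0, [x1, #-8]          ; LDURXi  - unscaled, signed 9-bit immediate
//   ldr  x0, [x1, w2, uxtw #3]  ; LDRXroW - register offset with a W extend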
3424 MRI.constrainRegClass(AM.BaseReg, &AArch64::GPR64spRegClass); in emitLdStWithAddr()
3464 MRI.constrainRegClass(AM.BaseReg, &AArch64::GPR64spRegClass); in emitLdStWithAddr()
3468 if (RC->hasSuperClassEq(&AArch64::GPR64RegClass)) { in emitLdStWithAddr()
3469 OffsetReg = MRI.createVirtualRegister(&AArch64::GPR32RegClass); in emitLdStWithAddr()
3471 .addReg(AM.ScaledReg, 0, AArch64::sub_32); in emitLdStWithAddr()
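// The roW register-offset forms take a 32-bit index register, so when the
// scaled register is a GPR64 the code above narrows it by copying its sub_32
// subregister into a freshly created GPR32 virtual register.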
3557 case AArch64::LDRQui: in getMemOpInfo()
3558 case AArch64::STRQui: in getMemOpInfo()
3564 case AArch64::LDRXui: in getMemOpInfo()
3565 case AArch64::LDRDui: in getMemOpInfo()
3566 case AArch64::STRXui: in getMemOpInfo()
3567 case AArch64::STRDui: in getMemOpInfo()
3568 case AArch64::PRFMui: in getMemOpInfo()
3574 case AArch64::LDRWui: in getMemOpInfo()
3575 case AArch64::LDRSui: in getMemOpInfo()
3576 case AArch64::LDRSWui: in getMemOpInfo()
3577 case AArch64::STRWui: in getMemOpInfo()
3578 case AArch64::STRSui: in getMemOpInfo()
3584 case AArch64::LDRHui: in getMemOpInfo()
3585 case AArch64::LDRHHui: in getMemOpInfo()
3586 case AArch64::LDRSHWui: in getMemOpInfo()
3587 case AArch64::LDRSHXui: in getMemOpInfo()
3588 case AArch64::STRHui: in getMemOpInfo()
3589 case AArch64::STRHHui: in getMemOpInfo()
3595 case AArch64::LDRBui: in getMemOpInfo()
3596 case AArch64::LDRBBui: in getMemOpInfo()
3597 case AArch64::LDRSBWui: in getMemOpInfo()
3598 case AArch64::LDRSBXui: in getMemOpInfo()
3599 case AArch64::STRBui: in getMemOpInfo()
3600 case AArch64::STRBBui: in getMemOpInfo()
3607 case AArch64::STRQpre: in getMemOpInfo()
3608 case AArch64::LDRQpost: in getMemOpInfo()
3614 case AArch64::STRXpre: in getMemOpInfo()
3615 case AArch64::STRDpre: in getMemOpInfo()
3616 case AArch64::LDRXpost: in getMemOpInfo()
3617 case AArch64::LDRDpost: in getMemOpInfo()
3623 case AArch64::STRWpost: in getMemOpInfo()
3624 case AArch64::LDRWpost: in getMemOpInfo()
3631 case AArch64::LDURQi: in getMemOpInfo()
3632 case AArch64::STURQi: in getMemOpInfo()
3638 case AArch64::LDURXi: in getMemOpInfo()
3639 case AArch64::LDURDi: in getMemOpInfo()
3640 case AArch64::LDAPURXi: in getMemOpInfo()
3641 case AArch64::STURXi: in getMemOpInfo()
3642 case AArch64::STURDi: in getMemOpInfo()
3643 case AArch64::STLURXi: in getMemOpInfo()
3644 case AArch64::PRFUMi: in getMemOpInfo()
3650 case AArch64::LDURWi: in getMemOpInfo()
3651 case AArch64::LDURSi: in getMemOpInfo()
3652 case AArch64::LDURSWi: in getMemOpInfo()
3653 case AArch64::LDAPURi: in getMemOpInfo()
3654 case AArch64::LDAPURSWi: in getMemOpInfo()
3655 case AArch64::STURWi: in getMemOpInfo()
3656 case AArch64::STURSi: in getMemOpInfo()
3657 case AArch64::STLURWi: in getMemOpInfo()
3663 case AArch64::LDURHi: in getMemOpInfo()
3664 case AArch64::LDURHHi: in getMemOpInfo()
3665 case AArch64::LDURSHXi: in getMemOpInfo()
3666 case AArch64::LDURSHWi: in getMemOpInfo()
3667 case AArch64::LDAPURHi: in getMemOpInfo()
3668 case AArch64::LDAPURSHWi: in getMemOpInfo()
3669 case AArch64::LDAPURSHXi: in getMemOpInfo()
3670 case AArch64::STURHi: in getMemOpInfo()
3671 case AArch64::STURHHi: in getMemOpInfo()
3672 case AArch64::STLURHi: in getMemOpInfo()
3678 case AArch64::LDURBi: in getMemOpInfo()
3679 case AArch64::LDURBBi: in getMemOpInfo()
3680 case AArch64::LDURSBXi: in getMemOpInfo()
3681 case AArch64::LDURSBWi: in getMemOpInfo()
3682 case AArch64::LDAPURBi: in getMemOpInfo()
3683 case AArch64::LDAPURSBWi: in getMemOpInfo()
3684 case AArch64::LDAPURSBXi: in getMemOpInfo()
3685 case AArch64::STURBi: in getMemOpInfo()
3686 case AArch64::STURBBi: in getMemOpInfo()
3687 case AArch64::STLURBi: in getMemOpInfo()
3694 case AArch64::LDPQi: in getMemOpInfo()
3695 case AArch64::LDNPQi: in getMemOpInfo()
3696 case AArch64::STPQi: in getMemOpInfo()
3697 case AArch64::STNPQi: in getMemOpInfo()
3703 case AArch64::LDPXi: in getMemOpInfo()
3704 case AArch64::LDPDi: in getMemOpInfo()
3705 case AArch64::LDNPXi: in getMemOpInfo()
3706 case AArch64::LDNPDi: in getMemOpInfo()
3707 case AArch64::STPXi: in getMemOpInfo()
3708 case AArch64::STPDi: in getMemOpInfo()
3709 case AArch64::STNPXi: in getMemOpInfo()
3710 case AArch64::STNPDi: in getMemOpInfo()
3716 case AArch64::LDPWi: in getMemOpInfo()
3717 case AArch64::LDPSi: in getMemOpInfo()
3718 case AArch64::LDNPWi: in getMemOpInfo()
3719 case AArch64::LDNPSi: in getMemOpInfo()
3720 case AArch64::STPWi: in getMemOpInfo()
3721 case AArch64::STPSi: in getMemOpInfo()
3722 case AArch64::STNPWi: in getMemOpInfo()
3723 case AArch64::STNPSi: in getMemOpInfo()
3730 case AArch64::STPQpre: in getMemOpInfo()
3731 case AArch64::LDPQpost: in getMemOpInfo()
3737 case AArch64::STPXpre: in getMemOpInfo()
3738 case AArch64::LDPXpost: in getMemOpInfo()
3739 case AArch64::STPDpre: in getMemOpInfo()
3740 case AArch64::LDPDpost: in getMemOpInfo()
3746 case AArch64::StoreSwiftAsyncContext: in getMemOpInfo()
3753 case AArch64::ADDG: in getMemOpInfo()
3759 case AArch64::TAGPstack: in getMemOpInfo()
3767 case AArch64::LDG: in getMemOpInfo()
3768 case AArch64::STGi: in getMemOpInfo()
3769 case AArch64::STZGi: in getMemOpInfo()
3776 case AArch64::STR_ZZZZXI: in getMemOpInfo()
3777 case AArch64::LDR_ZZZZXI: in getMemOpInfo()
3783 case AArch64::STR_ZZZXI: in getMemOpInfo()
3784 case AArch64::LDR_ZZZXI: in getMemOpInfo()
3790 case AArch64::STR_ZZXI: in getMemOpInfo()
3791 case AArch64::LDR_ZZXI: in getMemOpInfo()
3797 case AArch64::LDR_PXI: in getMemOpInfo()
3798 case AArch64::STR_PXI: in getMemOpInfo()
3804 case AArch64::LDR_PPXI: in getMemOpInfo()
3805 case AArch64::STR_PPXI: in getMemOpInfo()
3811 case AArch64::LDR_ZXI: in getMemOpInfo()
3812 case AArch64::STR_ZXI: in getMemOpInfo()
3818 case AArch64::LD1B_IMM: in getMemOpInfo()
3819 case AArch64::LD1H_IMM: in getMemOpInfo()
3820 case AArch64::LD1W_IMM: in getMemOpInfo()
3821 case AArch64::LD1D_IMM: in getMemOpInfo()
3822 case AArch64::LDNT1B_ZRI: in getMemOpInfo()
3823 case AArch64::LDNT1H_ZRI: in getMemOpInfo()
3824 case AArch64::LDNT1W_ZRI: in getMemOpInfo()
3825 case AArch64::LDNT1D_ZRI: in getMemOpInfo()
3826 case AArch64::ST1B_IMM: in getMemOpInfo()
3827 case AArch64::ST1H_IMM: in getMemOpInfo()
3828 case AArch64::ST1W_IMM: in getMemOpInfo()
3829 case AArch64::ST1D_IMM: in getMemOpInfo()
3830 case AArch64::STNT1B_ZRI: in getMemOpInfo()
3831 case AArch64::STNT1H_ZRI: in getMemOpInfo()
3832 case AArch64::STNT1W_ZRI: in getMemOpInfo()
3833 case AArch64::STNT1D_ZRI: in getMemOpInfo()
3834 case AArch64::LDNF1B_IMM: in getMemOpInfo()
3835 case AArch64::LDNF1H_IMM: in getMemOpInfo()
3836 case AArch64::LDNF1W_IMM: in getMemOpInfo()
3837 case AArch64::LDNF1D_IMM: in getMemOpInfo()
3845 case AArch64::LD2B_IMM: in getMemOpInfo()
3846 case AArch64::LD2H_IMM: in getMemOpInfo()
3847 case AArch64::LD2W_IMM: in getMemOpInfo()
3848 case AArch64::LD2D_IMM: in getMemOpInfo()
3849 case AArch64::ST2B_IMM: in getMemOpInfo()
3850 case AArch64::ST2H_IMM: in getMemOpInfo()
3851 case AArch64::ST2W_IMM: in getMemOpInfo()
3852 case AArch64::ST2D_IMM: in getMemOpInfo()
3858 case AArch64::LD3B_IMM: in getMemOpInfo()
3859 case AArch64::LD3H_IMM: in getMemOpInfo()
3860 case AArch64::LD3W_IMM: in getMemOpInfo()
3861 case AArch64::LD3D_IMM: in getMemOpInfo()
3862 case AArch64::ST3B_IMM: in getMemOpInfo()
3863 case AArch64::ST3H_IMM: in getMemOpInfo()
3864 case AArch64::ST3W_IMM: in getMemOpInfo()
3865 case AArch64::ST3D_IMM: in getMemOpInfo()
3871 case AArch64::LD4B_IMM: in getMemOpInfo()
3872 case AArch64::LD4H_IMM: in getMemOpInfo()
3873 case AArch64::LD4W_IMM: in getMemOpInfo()
3874 case AArch64::LD4D_IMM: in getMemOpInfo()
3875 case AArch64::ST4B_IMM: in getMemOpInfo()
3876 case AArch64::ST4H_IMM: in getMemOpInfo()
3877 case AArch64::ST4W_IMM: in getMemOpInfo()
3878 case AArch64::ST4D_IMM: in getMemOpInfo()
3884 case AArch64::LD1B_H_IMM: in getMemOpInfo()
3885 case AArch64::LD1SB_H_IMM: in getMemOpInfo()
3886 case AArch64::LD1H_S_IMM: in getMemOpInfo()
3887 case AArch64::LD1SH_S_IMM: in getMemOpInfo()
3888 case AArch64::LD1W_D_IMM: in getMemOpInfo()
3889 case AArch64::LD1SW_D_IMM: in getMemOpInfo()
3890 case AArch64::ST1B_H_IMM: in getMemOpInfo()
3891 case AArch64::ST1H_S_IMM: in getMemOpInfo()
3892 case AArch64::ST1W_D_IMM: in getMemOpInfo()
3893 case AArch64::LDNF1B_H_IMM: in getMemOpInfo()
3894 case AArch64::LDNF1SB_H_IMM: in getMemOpInfo()
3895 case AArch64::LDNF1H_S_IMM: in getMemOpInfo()
3896 case AArch64::LDNF1SH_S_IMM: in getMemOpInfo()
3897 case AArch64::LDNF1W_D_IMM: in getMemOpInfo()
3898 case AArch64::LDNF1SW_D_IMM: in getMemOpInfo()
3906 case AArch64::LD1B_S_IMM: in getMemOpInfo()
3907 case AArch64::LD1SB_S_IMM: in getMemOpInfo()
3908 case AArch64::LD1H_D_IMM: in getMemOpInfo()
3909 case AArch64::LD1SH_D_IMM: in getMemOpInfo()
3910 case AArch64::ST1B_S_IMM: in getMemOpInfo()
3911 case AArch64::ST1H_D_IMM: in getMemOpInfo()
3912 case AArch64::LDNF1B_S_IMM: in getMemOpInfo()
3913 case AArch64::LDNF1SB_S_IMM: in getMemOpInfo()
3914 case AArch64::LDNF1H_D_IMM: in getMemOpInfo()
3915 case AArch64::LDNF1SH_D_IMM: in getMemOpInfo()
3923 case AArch64::LD1B_D_IMM: in getMemOpInfo()
3924 case AArch64::LD1SB_D_IMM: in getMemOpInfo()
3925 case AArch64::ST1B_D_IMM: in getMemOpInfo()
3926 case AArch64::LDNF1B_D_IMM: in getMemOpInfo()
3927 case AArch64::LDNF1SB_D_IMM: in getMemOpInfo()
3935 case AArch64::ST2Gi: in getMemOpInfo()
3936 case AArch64::STZ2Gi: in getMemOpInfo()
3942 case AArch64::STGPi: in getMemOpInfo()
3948 case AArch64::LD1RB_IMM: in getMemOpInfo()
3949 case AArch64::LD1RB_H_IMM: in getMemOpInfo()
3950 case AArch64::LD1RB_S_IMM: in getMemOpInfo()
3951 case AArch64::LD1RB_D_IMM: in getMemOpInfo()
3952 case AArch64::LD1RSB_H_IMM: in getMemOpInfo()
3953 case AArch64::LD1RSB_S_IMM: in getMemOpInfo()
3954 case AArch64::LD1RSB_D_IMM: in getMemOpInfo()
3960 case AArch64::LD1RH_IMM: in getMemOpInfo()
3961 case AArch64::LD1RH_S_IMM: in getMemOpInfo()
3962 case AArch64::LD1RH_D_IMM: in getMemOpInfo()
3963 case AArch64::LD1RSH_S_IMM: in getMemOpInfo()
3964 case AArch64::LD1RSH_D_IMM: in getMemOpInfo()
3970 case AArch64::LD1RW_IMM: in getMemOpInfo()
3971 case AArch64::LD1RW_D_IMM: in getMemOpInfo()
3972 case AArch64::LD1RSW_IMM: in getMemOpInfo()
3978 case AArch64::LD1RD_IMM: in getMemOpInfo()
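// Shape of the data getMemOpInfo() reports for the opcodes above, shown as a
// minimal standalone sketch (hypothetical struct and field names; the real
// function returns these through out-parameters).  The two sample entries
// follow the architectural encodings: the scaled form takes an unsigned
// 12-bit offset in units of the access size, the unscaled form a signed
// 9-bit byte offset.
#include <cstdint>

struct MemOpInfoSketch {
  unsigned Scale;    // bytes per offset unit
  unsigned Width;    // access width in bytes
  int64_t MinOffset; // smallest encodable offset, in Scale units
  int64_t MaxOffset; // largest encodable offset, in Scale units
};

constexpr MemOpInfoSketch LDRXuiInfo = {8, 8, 0, 4095};   // ldr x0, [x1, #imm]
constexpr MemOpInfoSketch LDURXiInfo = {1, 8, -256, 255}; // ldur x0, [x1, #imm]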
3994 case AArch64::LDRBBui: in getMemScale()
3995 case AArch64::LDURBBi: in getMemScale()
3996 case AArch64::LDRSBWui: in getMemScale()
3997 case AArch64::LDURSBWi: in getMemScale()
3998 case AArch64::STRBBui: in getMemScale()
3999 case AArch64::STURBBi: in getMemScale()
4001 case AArch64::LDRHHui: in getMemScale()
4002 case AArch64::LDURHHi: in getMemScale()
4003 case AArch64::LDRSHWui: in getMemScale()
4004 case AArch64::LDURSHWi: in getMemScale()
4005 case AArch64::STRHHui: in getMemScale()
4006 case AArch64::STURHHi: in getMemScale()
4008 case AArch64::LDRSui: in getMemScale()
4009 case AArch64::LDURSi: in getMemScale()
4010 case AArch64::LDRSpre: in getMemScale()
4011 case AArch64::LDRSWui: in getMemScale()
4012 case AArch64::LDURSWi: in getMemScale()
4013 case AArch64::LDRSWpre: in getMemScale()
4014 case AArch64::LDRWpre: in getMemScale()
4015 case AArch64::LDRWui: in getMemScale()
4016 case AArch64::LDURWi: in getMemScale()
4017 case AArch64::STRSui: in getMemScale()
4018 case AArch64::STURSi: in getMemScale()
4019 case AArch64::STRSpre: in getMemScale()
4020 case AArch64::STRWui: in getMemScale()
4021 case AArch64::STURWi: in getMemScale()
4022 case AArch64::STRWpre: in getMemScale()
4023 case AArch64::LDPSi: in getMemScale()
4024 case AArch64::LDPSWi: in getMemScale()
4025 case AArch64::LDPWi: in getMemScale()
4026 case AArch64::STPSi: in getMemScale()
4027 case AArch64::STPWi: in getMemScale()
4029 case AArch64::LDRDui: in getMemScale()
4030 case AArch64::LDURDi: in getMemScale()
4031 case AArch64::LDRDpre: in getMemScale()
4032 case AArch64::LDRXui: in getMemScale()
4033 case AArch64::LDURXi: in getMemScale()
4034 case AArch64::LDRXpre: in getMemScale()
4035 case AArch64::STRDui: in getMemScale()
4036 case AArch64::STURDi: in getMemScale()
4037 case AArch64::STRDpre: in getMemScale()
4038 case AArch64::STRXui: in getMemScale()
4039 case AArch64::STURXi: in getMemScale()
4040 case AArch64::STRXpre: in getMemScale()
4041 case AArch64::LDPDi: in getMemScale()
4042 case AArch64::LDPXi: in getMemScale()
4043 case AArch64::STPDi: in getMemScale()
4044 case AArch64::STPXi: in getMemScale()
4046 case AArch64::LDRQui: in getMemScale()
4047 case AArch64::LDURQi: in getMemScale()
4048 case AArch64::STRQui: in getMemScale()
4049 case AArch64::STURQi: in getMemScale()
4050 case AArch64::STRQpre: in getMemScale()
4051 case AArch64::LDPQi: in getMemScale()
4052 case AArch64::LDRQpre: in getMemScale()
4053 case AArch64::STPQi: in getMemScale()
4054 case AArch64::STGi: in getMemScale()
4055 case AArch64::STZGi: in getMemScale()
4056 case AArch64::ST2Gi: in getMemScale()
4057 case AArch64::STZ2Gi: in getMemScale()
4058 case AArch64::STGPi: in getMemScale()
4067 case AArch64::LDRWpre: in isPreLd()
4068 case AArch64::LDRXpre: in isPreLd()
4069 case AArch64::LDRSWpre: in isPreLd()
4070 case AArch64::LDRSpre: in isPreLd()
4071 case AArch64::LDRDpre: in isPreLd()
4072 case AArch64::LDRQpre: in isPreLd()
4081 case AArch64::STRWpre: in isPreSt()
4082 case AArch64::STRXpre: in isPreSt()
4083 case AArch64::STRSpre: in isPreSt()
4084 case AArch64::STRDpre: in isPreSt()
4085 case AArch64::STRQpre: in isPreSt()
4098 case AArch64::LDPSi: in isPairedLdSt()
4099 case AArch64::LDPSWi: in isPairedLdSt()
4100 case AArch64::LDPDi: in isPairedLdSt()
4101 case AArch64::LDPQi: in isPairedLdSt()
4102 case AArch64::LDPWi: in isPairedLdSt()
4103 case AArch64::LDPXi: in isPairedLdSt()
4104 case AArch64::STPSi: in isPairedLdSt()
4105 case AArch64::STPDi: in isPairedLdSt()
4106 case AArch64::STPQi: in isPairedLdSt()
4107 case AArch64::STPWi: in isPairedLdSt()
4108 case AArch64::STPXi: in isPairedLdSt()
4109 case AArch64::STGPi: in isPairedLdSt()
4143 return AArch64::FPR16RegClass.contains(Reg); in isHForm()
4145 return TRC == &AArch64::FPR16RegClass || in isHForm()
4146 TRC == &AArch64::FPR16_loRegClass; in isHForm()
4157 return AArch64::FPR128RegClass.contains(Reg); in isQForm()
4159 return TRC == &AArch64::FPR128RegClass || in isQForm()
4160 TRC == &AArch64::FPR128_loRegClass; in isQForm()
4167 case AArch64::BRK: in hasBTISemantics()
4168 case AArch64::HLT: in hasBTISemantics()
4169 case AArch64::PACIASP: in hasBTISemantics()
4170 case AArch64::PACIBSP: in hasBTISemantics()
4173 case AArch64::PAUTH_PROLOGUE: in hasBTISemantics()
4176 case AArch64::HINT: { in hasBTISemantics()
4195 return AArch64::FPR128RegClass.contains(Reg) || in isFpOrNEON()
4196 AArch64::FPR64RegClass.contains(Reg) || in isFpOrNEON()
4197 AArch64::FPR32RegClass.contains(Reg) || in isFpOrNEON()
4198 AArch64::FPR16RegClass.contains(Reg) || in isFpOrNEON()
4199 AArch64::FPR8RegClass.contains(Reg); in isFpOrNEON()
4211 return TRC == &AArch64::FPR128RegClass || in isFpOrNEON()
4212 TRC == &AArch64::FPR128_loRegClass || in isFpOrNEON()
4213 TRC == &AArch64::FPR64RegClass || in isFpOrNEON()
4214 TRC == &AArch64::FPR64_loRegClass || in isFpOrNEON()
4215 TRC == &AArch64::FPR32RegClass || TRC == &AArch64::FPR16RegClass || in isFpOrNEON()
4216 TRC == &AArch64::FPR8RegClass; in isFpOrNEON()
4244 case AArch64::STRSui: in canPairLdStOpc()
4245 case AArch64::STURSi: in canPairLdStOpc()
4246 return SecondOpc == AArch64::STRSui || SecondOpc == AArch64::STURSi; in canPairLdStOpc()
4247 case AArch64::STRDui: in canPairLdStOpc()
4248 case AArch64::STURDi: in canPairLdStOpc()
4249 return SecondOpc == AArch64::STRDui || SecondOpc == AArch64::STURDi; in canPairLdStOpc()
4250 case AArch64::STRQui: in canPairLdStOpc()
4251 case AArch64::STURQi: in canPairLdStOpc()
4252 return SecondOpc == AArch64::STRQui || SecondOpc == AArch64::STURQi; in canPairLdStOpc()
4253 case AArch64::STRWui: in canPairLdStOpc()
4254 case AArch64::STURWi: in canPairLdStOpc()
4255 return SecondOpc == AArch64::STRWui || SecondOpc == AArch64::STURWi; in canPairLdStOpc()
4256 case AArch64::STRXui: in canPairLdStOpc()
4257 case AArch64::STURXi: in canPairLdStOpc()
4258 return SecondOpc == AArch64::STRXui || SecondOpc == AArch64::STURXi; in canPairLdStOpc()
4259 case AArch64::LDRSui: in canPairLdStOpc()
4260 case AArch64::LDURSi: in canPairLdStOpc()
4261 return SecondOpc == AArch64::LDRSui || SecondOpc == AArch64::LDURSi; in canPairLdStOpc()
4262 case AArch64::LDRDui: in canPairLdStOpc()
4263 case AArch64::LDURDi: in canPairLdStOpc()
4264 return SecondOpc == AArch64::LDRDui || SecondOpc == AArch64::LDURDi; in canPairLdStOpc()
4265 case AArch64::LDRQui: in canPairLdStOpc()
4266 case AArch64::LDURQi: in canPairLdStOpc()
4267 return SecondOpc == AArch64::LDRQui || SecondOpc == AArch64::LDURQi; in canPairLdStOpc()
4268 case AArch64::LDRWui: in canPairLdStOpc()
4269 case AArch64::LDURWi: in canPairLdStOpc()
4270 return SecondOpc == AArch64::LDRSWui || SecondOpc == AArch64::LDURSWi; in canPairLdStOpc()
4271 case AArch64::LDRSWui: in canPairLdStOpc()
4272 case AArch64::LDURSWi: in canPairLdStOpc()
4273 return SecondOpc == AArch64::LDRWui || SecondOpc == AArch64::LDURWi; in canPairLdStOpc()
4274 case AArch64::LDRXui: in canPairLdStOpc()
4275 case AArch64::LDURXi: in canPairLdStOpc()
4276 return SecondOpc == AArch64::LDRXui || SecondOpc == AArch64::LDURXi; in canPairLdStOpc()
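// Pairing compatibility sketch: two opcodes can be clustered when they are the
// scaled/unscaled variants of the same access, and the 32-bit zero- and
// sign-extending loads may cross-pair, e.g. (illustrative)
//   ldr   w0, [x2]        ; LDRWui
//   ldrsw x1, [x2, #4]    ; LDRSWui - treated as a pairable neighbour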
4455 if (AArch64::GPR32spRegClass.contains(DestReg) && in copyPhysReg()
4456 (AArch64::GPR32spRegClass.contains(SrcReg) || SrcReg == AArch64::WZR)) { in copyPhysReg()
4459 if (DestReg == AArch64::WSP || SrcReg == AArch64::WSP) { in copyPhysReg()
4464 DestReg, AArch64::sub_32, &AArch64::GPR64spRegClass); in copyPhysReg()
4466 SrcReg, AArch64::sub_32, &AArch64::GPR64spRegClass); in copyPhysReg()
4471 BuildMI(MBB, I, DL, get(AArch64::ADDXri), DestRegX) in copyPhysReg()
4477 BuildMI(MBB, I, DL, get(AArch64::ADDWri), DestReg) in copyPhysReg()
4482 } else if (SrcReg == AArch64::WZR && Subtarget.hasZeroCycleZeroingGP()) { in copyPhysReg()
4483 BuildMI(MBB, I, DL, get(AArch64::MOVZWi), DestReg) in copyPhysReg()
4490 DestReg, AArch64::sub_32, &AArch64::GPR64spRegClass); in copyPhysReg()
4492 SrcReg, AArch64::sub_32, &AArch64::GPR64spRegClass); in copyPhysReg()
4497 BuildMI(MBB, I, DL, get(AArch64::ORRXrr), DestRegX) in copyPhysReg()
4498 .addReg(AArch64::XZR) in copyPhysReg()
4503 BuildMI(MBB, I, DL, get(AArch64::ORRWrr), DestReg) in copyPhysReg()
4504 .addReg(AArch64::WZR) in copyPhysReg()
4512 if (AArch64::PPRRegClass.contains(DestReg) && in copyPhysReg()
4513 AArch64::PPRRegClass.contains(SrcReg)) { in copyPhysReg()
4516 BuildMI(MBB, I, DL, get(AArch64::ORR_PPzPP), DestReg) in copyPhysReg()
4525 bool DestIsPNR = AArch64::PNRRegClass.contains(DestReg); in copyPhysReg()
4526 bool SrcIsPNR = AArch64::PNRRegClass.contains(SrcReg); in copyPhysReg()
4529 return (R - AArch64::PN0) + AArch64::P0; in copyPhysReg()
4535 auto NewMI = BuildMI(MBB, I, DL, get(AArch64::ORR_PPzPP), PPRDestReg) in copyPhysReg()
4546 if (AArch64::ZPRRegClass.contains(DestReg) && in copyPhysReg()
4547 AArch64::ZPRRegClass.contains(SrcReg)) { in copyPhysReg()
4550 BuildMI(MBB, I, DL, get(AArch64::ORR_ZZZ), DestReg) in copyPhysReg()
4557 if ((AArch64::ZPR2RegClass.contains(DestReg) || in copyPhysReg()
4558 AArch64::ZPR2StridedOrContiguousRegClass.contains(DestReg)) && in copyPhysReg()
4559 (AArch64::ZPR2RegClass.contains(SrcReg) || in copyPhysReg()
4560 AArch64::ZPR2StridedOrContiguousRegClass.contains(SrcReg))) { in copyPhysReg()
4563 static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1}; in copyPhysReg()
4564 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ, in copyPhysReg()
4570 if (AArch64::ZPR3RegClass.contains(DestReg) && in copyPhysReg()
4571 AArch64::ZPR3RegClass.contains(SrcReg)) { in copyPhysReg()
4574 static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1, in copyPhysReg()
4575 AArch64::zsub2}; in copyPhysReg()
4576 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ, in copyPhysReg()
4582 if ((AArch64::ZPR4RegClass.contains(DestReg) || in copyPhysReg()
4583 AArch64::ZPR4StridedOrContiguousRegClass.contains(DestReg)) && in copyPhysReg()
4584 (AArch64::ZPR4RegClass.contains(SrcReg) || in copyPhysReg()
4585 AArch64::ZPR4StridedOrContiguousRegClass.contains(SrcReg))) { in copyPhysReg()
4588 static const unsigned Indices[] = {AArch64::zsub0, AArch64::zsub1, in copyPhysReg()
4589 AArch64::zsub2, AArch64::zsub3}; in copyPhysReg()
4590 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORR_ZZZ, in copyPhysReg()
4595 if (AArch64::GPR64spRegClass.contains(DestReg) && in copyPhysReg()
4596 (AArch64::GPR64spRegClass.contains(SrcReg) || SrcReg == AArch64::XZR)) { in copyPhysReg()
4597 if (DestReg == AArch64::SP || SrcReg == AArch64::SP) { in copyPhysReg()
4599 BuildMI(MBB, I, DL, get(AArch64::ADDXri), DestReg) in copyPhysReg()
4603 } else if (SrcReg == AArch64::XZR && Subtarget.hasZeroCycleZeroingGP()) { in copyPhysReg()
4604 BuildMI(MBB, I, DL, get(AArch64::MOVZXi), DestReg) in copyPhysReg()
4609 BuildMI(MBB, I, DL, get(AArch64::ORRXrr), DestReg) in copyPhysReg()
4610 .addReg(AArch64::XZR) in copyPhysReg()
4617 if (AArch64::DDDDRegClass.contains(DestReg) && in copyPhysReg()
4618 AArch64::DDDDRegClass.contains(SrcReg)) { in copyPhysReg()
4619 static const unsigned Indices[] = {AArch64::dsub0, AArch64::dsub1, in copyPhysReg()
4620 AArch64::dsub2, AArch64::dsub3}; in copyPhysReg()
4621 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8, in copyPhysReg()
4627 if (AArch64::DDDRegClass.contains(DestReg) && in copyPhysReg()
4628 AArch64::DDDRegClass.contains(SrcReg)) { in copyPhysReg()
4629 static const unsigned Indices[] = {AArch64::dsub0, AArch64::dsub1, in copyPhysReg()
4630 AArch64::dsub2}; in copyPhysReg()
4631 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8, in copyPhysReg()
4637 if (AArch64::DDRegClass.contains(DestReg) && in copyPhysReg()
4638 AArch64::DDRegClass.contains(SrcReg)) { in copyPhysReg()
4639 static const unsigned Indices[] = {AArch64::dsub0, AArch64::dsub1}; in copyPhysReg()
4640 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv8i8, in copyPhysReg()
4646 if (AArch64::QQQQRegClass.contains(DestReg) && in copyPhysReg()
4647 AArch64::QQQQRegClass.contains(SrcReg)) { in copyPhysReg()
4648 static const unsigned Indices[] = {AArch64::qsub0, AArch64::qsub1, in copyPhysReg()
4649 AArch64::qsub2, AArch64::qsub3}; in copyPhysReg()
4650 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8, in copyPhysReg()
4656 if (AArch64::QQQRegClass.contains(DestReg) && in copyPhysReg()
4657 AArch64::QQQRegClass.contains(SrcReg)) { in copyPhysReg()
4658 static const unsigned Indices[] = {AArch64::qsub0, AArch64::qsub1, in copyPhysReg()
4659 AArch64::qsub2}; in copyPhysReg()
4660 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8, in copyPhysReg()
4666 if (AArch64::QQRegClass.contains(DestReg) && in copyPhysReg()
4667 AArch64::QQRegClass.contains(SrcReg)) { in copyPhysReg()
4668 static const unsigned Indices[] = {AArch64::qsub0, AArch64::qsub1}; in copyPhysReg()
4669 copyPhysRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRv16i8, in copyPhysReg()
4674 if (AArch64::XSeqPairsClassRegClass.contains(DestReg) && in copyPhysReg()
4675 AArch64::XSeqPairsClassRegClass.contains(SrcReg)) { in copyPhysReg()
4676 static const unsigned Indices[] = {AArch64::sube64, AArch64::subo64}; in copyPhysReg()
4677 copyGPRRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRXrs, in copyPhysReg()
4678 AArch64::XZR, Indices); in copyPhysReg()
4682 if (AArch64::WSeqPairsClassRegClass.contains(DestReg) && in copyPhysReg()
4683 AArch64::WSeqPairsClassRegClass.contains(SrcReg)) { in copyPhysReg()
4684 static const unsigned Indices[] = {AArch64::sube32, AArch64::subo32}; in copyPhysReg()
4685 copyGPRRegTuple(MBB, I, DL, DestReg, SrcReg, KillSrc, AArch64::ORRWrs, in copyPhysReg()
4686 AArch64::WZR, Indices); in copyPhysReg()
4690 if (AArch64::FPR128RegClass.contains(DestReg) && in copyPhysReg()
4691 AArch64::FPR128RegClass.contains(SrcReg)) { in copyPhysReg()
4694 BuildMI(MBB, I, DL, get(AArch64::ORR_ZZZ)) in copyPhysReg()
4695 .addReg(AArch64::Z0 + (DestReg - AArch64::Q0), RegState::Define) in copyPhysReg()
4696 .addReg(AArch64::Z0 + (SrcReg - AArch64::Q0)) in copyPhysReg()
4697 .addReg(AArch64::Z0 + (SrcReg - AArch64::Q0)); in copyPhysReg()
4699 BuildMI(MBB, I, DL, get(AArch64::ORRv16i8), DestReg) in copyPhysReg()
4703 BuildMI(MBB, I, DL, get(AArch64::STRQpre)) in copyPhysReg()
4704 .addReg(AArch64::SP, RegState::Define) in copyPhysReg()
4706 .addReg(AArch64::SP) in copyPhysReg()
4708 BuildMI(MBB, I, DL, get(AArch64::LDRQpost)) in copyPhysReg()
4709 .addReg(AArch64::SP, RegState::Define) in copyPhysReg()
4711 .addReg(AArch64::SP) in copyPhysReg()
4717 if (AArch64::FPR64RegClass.contains(DestReg) && in copyPhysReg()
4718 AArch64::FPR64RegClass.contains(SrcReg)) { in copyPhysReg()
4719 BuildMI(MBB, I, DL, get(AArch64::FMOVDr), DestReg) in copyPhysReg()
4724 if (AArch64::FPR32RegClass.contains(DestReg) && in copyPhysReg()
4725 AArch64::FPR32RegClass.contains(SrcReg)) { in copyPhysReg()
4726 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg) in copyPhysReg()
4731 if (AArch64::FPR16RegClass.contains(DestReg) && in copyPhysReg()
4732 AArch64::FPR16RegClass.contains(SrcReg)) { in copyPhysReg()
4734 RI.getMatchingSuperReg(DestReg, AArch64::hsub, &AArch64::FPR32RegClass); in copyPhysReg()
4736 RI.getMatchingSuperReg(SrcReg, AArch64::hsub, &AArch64::FPR32RegClass); in copyPhysReg()
4737 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg) in copyPhysReg()
4742 if (AArch64::FPR8RegClass.contains(DestReg) && in copyPhysReg()
4743 AArch64::FPR8RegClass.contains(SrcReg)) { in copyPhysReg()
4745 RI.getMatchingSuperReg(DestReg, AArch64::bsub, &AArch64::FPR32RegClass); in copyPhysReg()
4747 RI.getMatchingSuperReg(SrcReg, AArch64::bsub, &AArch64::FPR32RegClass); in copyPhysReg()
4748 BuildMI(MBB, I, DL, get(AArch64::FMOVSr), DestReg) in copyPhysReg()
4754 if (AArch64::FPR64RegClass.contains(DestReg) && in copyPhysReg()
4755 AArch64::GPR64RegClass.contains(SrcReg)) { in copyPhysReg()
4756 BuildMI(MBB, I, DL, get(AArch64::FMOVXDr), DestReg) in copyPhysReg()
4760 if (AArch64::GPR64RegClass.contains(DestReg) && in copyPhysReg()
4761 AArch64::FPR64RegClass.contains(SrcReg)) { in copyPhysReg()
4762 BuildMI(MBB, I, DL, get(AArch64::FMOVDXr), DestReg) in copyPhysReg()
4767 if (AArch64::FPR32RegClass.contains(DestReg) && in copyPhysReg()
4768 AArch64::GPR32RegClass.contains(SrcReg)) { in copyPhysReg()
4769 BuildMI(MBB, I, DL, get(AArch64::FMOVWSr), DestReg) in copyPhysReg()
4773 if (AArch64::GPR32RegClass.contains(DestReg) && in copyPhysReg()
4774 AArch64::FPR32RegClass.contains(SrcReg)) { in copyPhysReg()
4775 BuildMI(MBB, I, DL, get(AArch64::FMOVSWr), DestReg) in copyPhysReg()
4780 if (DestReg == AArch64::NZCV) { in copyPhysReg()
4781 assert(AArch64::GPR64RegClass.contains(SrcReg) && "Invalid NZCV copy"); in copyPhysReg()
4782 BuildMI(MBB, I, DL, get(AArch64::MSR)) in copyPhysReg()
4785 .addReg(AArch64::NZCV, RegState::Implicit | RegState::Define); in copyPhysReg()
4789 if (SrcReg == AArch64::NZCV) { in copyPhysReg()
4790 assert(AArch64::GPR64RegClass.contains(DestReg) && "Invalid NZCV copy"); in copyPhysReg()
4791 BuildMI(MBB, I, DL, get(AArch64::MRS), DestReg) in copyPhysReg()
4793 .addReg(AArch64::NZCV, RegState::Implicit | getKillRegState(KillSrc)); in copyPhysReg()
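// A few of the copy lowerings above, in assembly form (illustrative):
//   GPR64 -> GPR64 : orr x0, xzr, x1      (ADDXri is used when SP is involved)
//   FPR64 -> FPR64 : fmov d0, d1
//   GPR64 -> FPR64 : fmov d0, x1
//   GPR64 <-> NZCV : msr nzcv, x0  /  mrs x0, nzcv
//   FPR16/FPR8 copies are widened to the containing S registers and use an
//   FMOVSr (fmov s0, s1).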
4847 if (AArch64::FPR8RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
4848 Opc = AArch64::STRBui; in storeRegToStackSlot()
4851 if (AArch64::FPR16RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
4852 Opc = AArch64::STRHui; in storeRegToStackSlot()
4853 else if (AArch64::PNRRegClass.hasSubClassEq(RC) || in storeRegToStackSlot()
4854 AArch64::PPRRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4857 Opc = AArch64::STR_PXI; in storeRegToStackSlot()
4863 if (AArch64::GPR32allRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4864 Opc = AArch64::STRWui; in storeRegToStackSlot()
4866 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR32RegClass); in storeRegToStackSlot()
4868 assert(SrcReg != AArch64::WSP); in storeRegToStackSlot()
4869 } else if (AArch64::FPR32RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
4870 Opc = AArch64::STRSui; in storeRegToStackSlot()
4871 else if (AArch64::PPR2RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4872 Opc = AArch64::STR_PPXI; in storeRegToStackSlot()
4877 if (AArch64::GPR64allRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4878 Opc = AArch64::STRXui; in storeRegToStackSlot()
4880 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass); in storeRegToStackSlot()
4882 assert(SrcReg != AArch64::SP); in storeRegToStackSlot()
4883 } else if (AArch64::FPR64RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4884 Opc = AArch64::STRDui; in storeRegToStackSlot()
4885 } else if (AArch64::WSeqPairsClassRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4887 get(AArch64::STPWi), SrcReg, isKill, in storeRegToStackSlot()
4888 AArch64::sube32, AArch64::subo32, FI, MMO); in storeRegToStackSlot()
4893 if (AArch64::FPR128RegClass.hasSubClassEq(RC)) in storeRegToStackSlot()
4894 Opc = AArch64::STRQui; in storeRegToStackSlot()
4895 else if (AArch64::DDRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4897 Opc = AArch64::ST1Twov1d; in storeRegToStackSlot()
4899 } else if (AArch64::XSeqPairsClassRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4901 get(AArch64::STPXi), SrcReg, isKill, in storeRegToStackSlot()
4902 AArch64::sube64, AArch64::subo64, FI, MMO); in storeRegToStackSlot()
4904 } else if (AArch64::ZPRRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4907 Opc = AArch64::STR_ZXI; in storeRegToStackSlot()
4912 if (AArch64::DDDRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4914 Opc = AArch64::ST1Threev1d; in storeRegToStackSlot()
4919 if (AArch64::DDDDRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4921 Opc = AArch64::ST1Fourv1d; in storeRegToStackSlot()
4923 } else if (AArch64::QQRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4925 Opc = AArch64::ST1Twov2d; in storeRegToStackSlot()
4927 } else if (AArch64::ZPR2RegClass.hasSubClassEq(RC) || in storeRegToStackSlot()
4928 AArch64::ZPR2StridedOrContiguousRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4931 Opc = AArch64::STR_ZZXI; in storeRegToStackSlot()
4936 if (AArch64::QQQRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4938 Opc = AArch64::ST1Threev2d; in storeRegToStackSlot()
4940 } else if (AArch64::ZPR3RegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4943 Opc = AArch64::STR_ZZZXI; in storeRegToStackSlot()
4948 if (AArch64::QQQQRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4950 Opc = AArch64::ST1Fourv2d; in storeRegToStackSlot()
4952 } else if (AArch64::ZPR4RegClass.hasSubClassEq(RC) || in storeRegToStackSlot()
4953 AArch64::ZPR4StridedOrContiguousRegClass.hasSubClassEq(RC)) { in storeRegToStackSlot()
4956 Opc = AArch64::STR_ZZZZXI; in storeRegToStackSlot()
5019 if (AArch64::FPR8RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
5020 Opc = AArch64::LDRBui; in loadRegFromStackSlot()
5023 bool IsPNR = AArch64::PNRRegClass.hasSubClassEq(RC); in loadRegFromStackSlot()
5024 if (AArch64::FPR16RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
5025 Opc = AArch64::LDRHui; in loadRegFromStackSlot()
5026 else if (IsPNR || AArch64::PPRRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5031 Opc = AArch64::LDR_PXI; in loadRegFromStackSlot()
5037 if (AArch64::GPR32allRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5038 Opc = AArch64::LDRWui; in loadRegFromStackSlot()
5040 MF.getRegInfo().constrainRegClass(DestReg, &AArch64::GPR32RegClass); in loadRegFromStackSlot()
5042 assert(DestReg != AArch64::WSP); in loadRegFromStackSlot()
5043 } else if (AArch64::FPR32RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
5044 Opc = AArch64::LDRSui; in loadRegFromStackSlot()
5045 else if (AArch64::PPR2RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5046 Opc = AArch64::LDR_PPXI; in loadRegFromStackSlot()
5051 if (AArch64::GPR64allRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5052 Opc = AArch64::LDRXui; in loadRegFromStackSlot()
5054 MF.getRegInfo().constrainRegClass(DestReg, &AArch64::GPR64RegClass); in loadRegFromStackSlot()
5056 assert(DestReg != AArch64::SP); in loadRegFromStackSlot()
5057 } else if (AArch64::FPR64RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5058 Opc = AArch64::LDRDui; in loadRegFromStackSlot()
5059 } else if (AArch64::WSeqPairsClassRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5061 get(AArch64::LDPWi), DestReg, AArch64::sube32, in loadRegFromStackSlot()
5062 AArch64::subo32, FI, MMO); in loadRegFromStackSlot()
5067 if (AArch64::FPR128RegClass.hasSubClassEq(RC)) in loadRegFromStackSlot()
5068 Opc = AArch64::LDRQui; in loadRegFromStackSlot()
5069 else if (AArch64::DDRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5071 Opc = AArch64::LD1Twov1d; in loadRegFromStackSlot()
5073 } else if (AArch64::XSeqPairsClassRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5075 get(AArch64::LDPXi), DestReg, AArch64::sube64, in loadRegFromStackSlot()
5076 AArch64::subo64, FI, MMO); in loadRegFromStackSlot()
5078 } else if (AArch64::ZPRRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5081 Opc = AArch64::LDR_ZXI; in loadRegFromStackSlot()
5086 if (AArch64::DDDRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5088 Opc = AArch64::LD1Threev1d; in loadRegFromStackSlot()
5093 if (AArch64::DDDDRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5095 Opc = AArch64::LD1Fourv1d; in loadRegFromStackSlot()
5097 } else if (AArch64::QQRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5099 Opc = AArch64::LD1Twov2d; in loadRegFromStackSlot()
5101 } else if (AArch64::ZPR2RegClass.hasSubClassEq(RC) || in loadRegFromStackSlot()
5102 AArch64::ZPR2StridedOrContiguousRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5105 Opc = AArch64::LDR_ZZXI; in loadRegFromStackSlot()
5110 if (AArch64::QQQRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5112 Opc = AArch64::LD1Threev2d; in loadRegFromStackSlot()
5114 } else if (AArch64::ZPR3RegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5117 Opc = AArch64::LDR_ZZZXI; in loadRegFromStackSlot()
5122 if (AArch64::QQQQRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5124 Opc = AArch64::LD1Fourv2d; in loadRegFromStackSlot()
5126 } else if (AArch64::ZPR4RegClass.hasSubClassEq(RC) || in loadRegFromStackSlot()
5127 AArch64::ZPR4StridedOrContiguousRegClass.hasSubClassEq(RC)) { in loadRegFromStackSlot()
5130 Opc = AArch64::LDR_ZZZZXI; in loadRegFromStackSlot()
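// Spill/reload symmetry: storeRegToStackSlot and loadRegFromStackSlot choose
// matching opcodes per register class, e.g. (illustrative)
//   GPR64  : str x0, [sp, #imm]          /  ldr x0, [sp, #imm]          (STRXui / LDRXui)
//   FPR128 : str q0, [sp, #imm]          /  ldr q0, [sp, #imm]          (STRQui / LDRQui)
//   ZPR    : str z0, [sp, #imm, mul vl]  /  ldr z0, [sp, #imm, mul vl]  (STR_ZXI / LDR_ZXI)
//   D-pairs: st1 { v0.1d, v1.1d }, [sp]  /  ld1 { v0.1d, v1.1d }, [sp]  (ST1Twov1d / LD1Twov1d)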
5155 return I.modifiesRegister(AArch64::NZCV, TRI) || in isNZCVTouchedInInstructionRange()
5156 I.readsRegister(AArch64::NZCV, TRI); in isNZCVTouchedInInstructionRange()
5202 // Expr + NumBytes + NumVGScaledBytes * AArch64::VG
5242 if (Reg == AArch64::SP) in createDefCFAExpression()
5244 else if (Reg == AArch64::FP) in createDefCFAExpression()
5249 // Build up the expression (Reg + NumBytes + NumVGScaledBytes * AArch64::VG) in createDefCFAExpression()
5255 TRI.getDwarfRegNum(AArch64::VG, true), Comment); in createDefCFAExpression()
5298 // Build up expression (NumBytes + NumVGScaledBytes * AArch64::VG) in createCFAOffset()
5301 TRI.getDwarfRegNum(AArch64::VG, true), Comment); in createCFAOffset()
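// The expressions noted above combine a fixed byte offset with a scalable part
// measured in multiples of VG (the number of 64-bit granules in an SVE vector,
// so 8 * VG bytes per Z register).  For "Reg + NumBytes + NumVGScaledBytes *
// AArch64::VG" the emitted DWARF expression is roughly (a sketch; the exact
// encoding is LEB128 and built by the helpers in this file):
//   DW_OP_breg<Reg> 0
//   DW_OP_consts NumBytes, DW_OP_plus
//   DW_OP_consts NumVGScaledBytes, DW_OP_bregx VG 0, DW_OP_mul, DW_OP_plus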
5329 case AArch64::ADDXri: in emitFrameOffsetAdj()
5330 case AArch64::ADDSXri: in emitFrameOffsetAdj()
5331 case AArch64::SUBXri: in emitFrameOffsetAdj()
5332 case AArch64::SUBSXri: in emitFrameOffsetAdj()
5336 case AArch64::ADDVL_XXI: in emitFrameOffsetAdj()
5337 case AArch64::ADDPL_XXI: in emitFrameOffsetAdj()
5338 case AArch64::ADDSVL_XXI: in emitFrameOffsetAdj()
5339 case AArch64::ADDSPL_XXI: in emitFrameOffsetAdj()
5354 if (Opc == AArch64::ADDVL_XXI || Opc == AArch64::ADDSVL_XXI) in emitFrameOffsetAdj()
5356 else if (Opc == AArch64::ADDPL_XXI || Opc == AArch64::ADDSPL_XXI) in emitFrameOffsetAdj()
5372 if (TmpReg == AArch64::XZR) in emitFrameOffsetAdj()
5374 &AArch64::GPR64RegClass); in emitFrameOffsetAdj()
5400 if (Sign == -1 || Opc == AArch64::SUBXri || Opc == AArch64::SUBSXri) in emitFrameOffsetAdj()
5419 if ((DestReg == AArch64::FP && SrcReg == AArch64::SP) || in emitFrameOffsetAdj()
5420 (SrcReg == AArch64::FP && DestReg == AArch64::SP)) { in emitFrameOffsetAdj()
5424 BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_SetFP)).setMIFlag(Flag); in emitFrameOffsetAdj()
5426 BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_AddFP)) in emitFrameOffsetAdj()
5431 } else if (DestReg == AArch64::SP) { in emitFrameOffsetAdj()
5434 assert(SrcReg == AArch64::SP && "Unexpected SrcReg for SEH_StackAlloc"); in emitFrameOffsetAdj()
5435 BuildMI(MBB, MBBI, DL, TII->get(AArch64::SEH_StackAlloc)) in emitFrameOffsetAdj()
5467 assert((DestReg != AArch64::SP || Bytes % 8 == 0) && in emitFrameOffset()
5469 unsigned Opc = SetNZCV ? AArch64::ADDSXri : AArch64::ADDXri; in emitFrameOffset()
5472 Opc = SetNZCV ? AArch64::SUBSXri : AArch64::SUBXri; in emitFrameOffset()
5477 CFAOffset += (Opc == AArch64::ADDXri || Opc == AArch64::ADDSXri) in emitFrameOffset()
5491 UseSVL ? AArch64::ADDSVL_XXI : AArch64::ADDVL_XXI, in emitFrameOffset()
5499 assert(DestReg != AArch64::SP && "Unaligned access to SP"); in emitFrameOffset()
5501 UseSVL ? AArch64::ADDSPL_XXI : AArch64::ADDPL_XXI, in emitFrameOffset()
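// The SVE opcodes above scale their immediate by a runtime quantity:
// ADDVL_XXI adds imm * VL bytes (VL = SVE vector length in bytes), ADDPL_XXI
// adds imm * PL bytes (PL = VL / 8), and the ADDSVL/ADDSPL variants use the
// streaming vector length instead.  E.g. "addvl sp, sp, #-2" drops the stack
// by two vector registers' worth of space.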
5524 if (SrcReg == AArch64::SP && DstReg.isVirtual()) { in foldMemoryOperandImpl()
5525 MF.getRegInfo().constrainRegClass(DstReg, &AArch64::GPR64RegClass); in foldMemoryOperandImpl()
5528 if (DstReg == AArch64::SP && SrcReg.isVirtual()) { in foldMemoryOperandImpl()
5529 MF.getRegInfo().constrainRegClass(SrcReg, &AArch64::GPR64RegClass); in foldMemoryOperandImpl()
5533 if (SrcReg == AArch64::NZCV || DstReg == AArch64::NZCV) in foldMemoryOperandImpl()
5602 if (IsSpill && DstMO.isUndef() && SrcReg == AArch64::WZR && in foldMemoryOperandImpl()
5606 storeRegToStackSlot(MBB, InsertPt, AArch64::XZR, SrcMO.isKill(), in foldMemoryOperandImpl()
5607 FrameIndex, &AArch64::GPR64RegClass, &TRI, in foldMemoryOperandImpl()
5627 case AArch64::sub_32: in foldMemoryOperandImpl()
5628 FillRC = &AArch64::GPR32RegClass; in foldMemoryOperandImpl()
5630 case AArch64::ssub: in foldMemoryOperandImpl()
5631 FillRC = &AArch64::FPR32RegClass; in foldMemoryOperandImpl()
5633 case AArch64::dsub: in foldMemoryOperandImpl()
5634 FillRC = &AArch64::FPR64RegClass; in foldMemoryOperandImpl()
5676 case AArch64::LD1Rv1d: in isAArch64FrameOffsetLegal()
5677 case AArch64::LD1Rv2s: in isAArch64FrameOffsetLegal()
5678 case AArch64::LD1Rv2d: in isAArch64FrameOffsetLegal()
5679 case AArch64::LD1Rv4h: in isAArch64FrameOffsetLegal()
5680 case AArch64::LD1Rv4s: in isAArch64FrameOffsetLegal()
5681 case AArch64::LD1Rv8b: in isAArch64FrameOffsetLegal()
5682 case AArch64::LD1Rv8h: in isAArch64FrameOffsetLegal()
5683 case AArch64::LD1Rv16b: in isAArch64FrameOffsetLegal()
5684 case AArch64::LD1Twov2d: in isAArch64FrameOffsetLegal()
5685 case AArch64::LD1Threev2d: in isAArch64FrameOffsetLegal()
5686 case AArch64::LD1Fourv2d: in isAArch64FrameOffsetLegal()
5687 case AArch64::LD1Twov1d: in isAArch64FrameOffsetLegal()
5688 case AArch64::LD1Threev1d: in isAArch64FrameOffsetLegal()
5689 case AArch64::LD1Fourv1d: in isAArch64FrameOffsetLegal()
5690 case AArch64::ST1Twov2d: in isAArch64FrameOffsetLegal()
5691 case AArch64::ST1Threev2d: in isAArch64FrameOffsetLegal()
5692 case AArch64::ST1Fourv2d: in isAArch64FrameOffsetLegal()
5693 case AArch64::ST1Twov1d: in isAArch64FrameOffsetLegal()
5694 case AArch64::ST1Threev1d: in isAArch64FrameOffsetLegal()
5695 case AArch64::ST1Fourv1d: in isAArch64FrameOffsetLegal()
5696 case AArch64::ST1i8: in isAArch64FrameOffsetLegal()
5697 case AArch64::ST1i16: in isAArch64FrameOffsetLegal()
5698 case AArch64::ST1i32: in isAArch64FrameOffsetLegal()
5699 case AArch64::ST1i64: in isAArch64FrameOffsetLegal()
5700 case AArch64::IRG: in isAArch64FrameOffsetLegal()
5701 case AArch64::IRGstack: in isAArch64FrameOffsetLegal()
5702 case AArch64::STGloop: in isAArch64FrameOffsetLegal()
5703 case AArch64::STZGloop: in isAArch64FrameOffsetLegal()
5772 if (Opcode == AArch64::ADDSXri || Opcode == AArch64::ADDXri) { in rewriteAArch64FrameIndex()
5776 MachineInstr::NoFlags, (Opcode == AArch64::ADDSXri)); in rewriteAArch64FrameIndex()
5804 BuildMI(MBB, MI, DL, get(AArch64::HINT)).addImm(0); in insertNoop()
5808 return MCInstBuilder(AArch64::HINT).addImm(0); in getNop()
5811 // AArch64 supports MachineCombiner.
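// The (mul, add/sub) pairs matched below fold into the 3-source multiply-
// accumulate instructions; a plain MUL is itself represented as MADD with the
// zero register as accumulator, which is why WZR/XZR appear in the setFound()
// calls.  Illustrative rewrites:
//   mul w8, w0, w1 ; add w9, w8, w2   ==>  madd w9, w0, w1, w2
//   mul w8, w0, w1 ; sub w9, w2, w8   ==>  msub w9, w0, w1, w2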
5817 case AArch64::ADDSWrr: in isCombineInstrSettingFlag()
5818 case AArch64::ADDSWri: in isCombineInstrSettingFlag()
5819 case AArch64::ADDSXrr: in isCombineInstrSettingFlag()
5820 case AArch64::ADDSXri: in isCombineInstrSettingFlag()
5821 case AArch64::SUBSWrr: in isCombineInstrSettingFlag()
5822 case AArch64::SUBSXrr: in isCombineInstrSettingFlag()
5824 case AArch64::SUBSWri: in isCombineInstrSettingFlag()
5825 case AArch64::SUBSXri: in isCombineInstrSettingFlag()
5836 case AArch64::ADDWrr: in isCombineInstrCandidate32()
5837 case AArch64::ADDWri: in isCombineInstrCandidate32()
5838 case AArch64::SUBWrr: in isCombineInstrCandidate32()
5839 case AArch64::ADDSWrr: in isCombineInstrCandidate32()
5840 case AArch64::ADDSWri: in isCombineInstrCandidate32()
5841 case AArch64::SUBSWrr: in isCombineInstrCandidate32()
5843 case AArch64::SUBWri: in isCombineInstrCandidate32()
5844 case AArch64::SUBSWri: in isCombineInstrCandidate32()
5855 case AArch64::ADDXrr: in isCombineInstrCandidate64()
5856 case AArch64::ADDXri: in isCombineInstrCandidate64()
5857 case AArch64::SUBXrr: in isCombineInstrCandidate64()
5858 case AArch64::ADDSXrr: in isCombineInstrCandidate64()
5859 case AArch64::ADDSXri: in isCombineInstrCandidate64()
5860 case AArch64::SUBSXrr: in isCombineInstrCandidate64()
5862 case AArch64::SUBXri: in isCombineInstrCandidate64()
5863 case AArch64::SUBSXri: in isCombineInstrCandidate64()
5864 case AArch64::ADDv8i8: in isCombineInstrCandidate64()
5865 case AArch64::ADDv16i8: in isCombineInstrCandidate64()
5866 case AArch64::ADDv4i16: in isCombineInstrCandidate64()
5867 case AArch64::ADDv8i16: in isCombineInstrCandidate64()
5868 case AArch64::ADDv2i32: in isCombineInstrCandidate64()
5869 case AArch64::ADDv4i32: in isCombineInstrCandidate64()
5870 case AArch64::SUBv8i8: in isCombineInstrCandidate64()
5871 case AArch64::SUBv16i8: in isCombineInstrCandidate64()
5872 case AArch64::SUBv4i16: in isCombineInstrCandidate64()
5873 case AArch64::SUBv8i16: in isCombineInstrCandidate64()
5874 case AArch64::SUBv2i32: in isCombineInstrCandidate64()
5875 case AArch64::SUBv4i32: in isCombineInstrCandidate64()
5888 case AArch64::FADDHrr: in isCombineInstrCandidateFP()
5889 case AArch64::FADDSrr: in isCombineInstrCandidateFP()
5890 case AArch64::FADDDrr: in isCombineInstrCandidateFP()
5891 case AArch64::FADDv4f16: in isCombineInstrCandidateFP()
5892 case AArch64::FADDv8f16: in isCombineInstrCandidateFP()
5893 case AArch64::FADDv2f32: in isCombineInstrCandidateFP()
5894 case AArch64::FADDv2f64: in isCombineInstrCandidateFP()
5895 case AArch64::FADDv4f32: in isCombineInstrCandidateFP()
5896 case AArch64::FSUBHrr: in isCombineInstrCandidateFP()
5897 case AArch64::FSUBSrr: in isCombineInstrCandidateFP()
5898 case AArch64::FSUBDrr: in isCombineInstrCandidateFP()
5899 case AArch64::FSUBv4f16: in isCombineInstrCandidateFP()
5900 case AArch64::FSUBv8f16: in isCombineInstrCandidateFP()
5901 case AArch64::FSUBv2f32: in isCombineInstrCandidateFP()
5902 case AArch64::FSUBv2f64: in isCombineInstrCandidateFP()
5903 case AArch64::FSUBv4f32: in isCombineInstrCandidateFP()
5948 MI->findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, true) == -1) in canCombine()
5979 case AArch64::FADDHrr: in isAssociativeAndCommutative()
5980 case AArch64::FADDSrr: in isAssociativeAndCommutative()
5981 case AArch64::FADDDrr: in isAssociativeAndCommutative()
5982 case AArch64::FMULHrr: in isAssociativeAndCommutative()
5983 case AArch64::FMULSrr: in isAssociativeAndCommutative()
5984 case AArch64::FMULDrr: in isAssociativeAndCommutative()
5985 case AArch64::FMULX16: in isAssociativeAndCommutative()
5986 case AArch64::FMULX32: in isAssociativeAndCommutative()
5987 case AArch64::FMULX64: in isAssociativeAndCommutative()
5989 case AArch64::FADDv4f16: in isAssociativeAndCommutative()
5990 case AArch64::FADDv8f16: in isAssociativeAndCommutative()
5991 case AArch64::FADDv2f32: in isAssociativeAndCommutative()
5992 case AArch64::FADDv4f32: in isAssociativeAndCommutative()
5993 case AArch64::FADDv2f64: in isAssociativeAndCommutative()
5994 case AArch64::FMULv4f16: in isAssociativeAndCommutative()
5995 case AArch64::FMULv8f16: in isAssociativeAndCommutative()
5996 case AArch64::FMULv2f32: in isAssociativeAndCommutative()
5997 case AArch64::FMULv4f32: in isAssociativeAndCommutative()
5998 case AArch64::FMULv2f64: in isAssociativeAndCommutative()
5999 case AArch64::FMULXv4f16: in isAssociativeAndCommutative()
6000 case AArch64::FMULXv8f16: in isAssociativeAndCommutative()
6001 case AArch64::FMULXv2f32: in isAssociativeAndCommutative()
6002 case AArch64::FMULXv4f32: in isAssociativeAndCommutative()
6003 case AArch64::FMULXv2f64: in isAssociativeAndCommutative()
6007 case AArch64::FADD_ZZZ_H: in isAssociativeAndCommutative()
6008 case AArch64::FADD_ZZZ_S: in isAssociativeAndCommutative()
6009 case AArch64::FADD_ZZZ_D: in isAssociativeAndCommutative()
6010 case AArch64::FMUL_ZZZ_H: in isAssociativeAndCommutative()
6011 case AArch64::FMUL_ZZZ_S: in isAssociativeAndCommutative()
6012 case AArch64::FMUL_ZZZ_D: in isAssociativeAndCommutative()
6024 case AArch64::ADDWrr: in isAssociativeAndCommutative()
6025 case AArch64::ADDXrr: in isAssociativeAndCommutative()
6026 case AArch64::ANDWrr: in isAssociativeAndCommutative()
6027 case AArch64::ANDXrr: in isAssociativeAndCommutative()
6028 case AArch64::ORRWrr: in isAssociativeAndCommutative()
6029 case AArch64::ORRXrr: in isAssociativeAndCommutative()
6030 case AArch64::EORWrr: in isAssociativeAndCommutative()
6031 case AArch64::EORXrr: in isAssociativeAndCommutative()
6032 case AArch64::EONWrr: in isAssociativeAndCommutative()
6033 case AArch64::EONXrr: in isAssociativeAndCommutative()
6037 case AArch64::ADDv8i8: in isAssociativeAndCommutative()
6038 case AArch64::ADDv16i8: in isAssociativeAndCommutative()
6039 case AArch64::ADDv4i16: in isAssociativeAndCommutative()
6040 case AArch64::ADDv8i16: in isAssociativeAndCommutative()
6041 case AArch64::ADDv2i32: in isAssociativeAndCommutative()
6042 case AArch64::ADDv4i32: in isAssociativeAndCommutative()
6043 case AArch64::ADDv1i64: in isAssociativeAndCommutative()
6044 case AArch64::ADDv2i64: in isAssociativeAndCommutative()
6045 case AArch64::MULv8i8: in isAssociativeAndCommutative()
6046 case AArch64::MULv16i8: in isAssociativeAndCommutative()
6047 case AArch64::MULv4i16: in isAssociativeAndCommutative()
6048 case AArch64::MULv8i16: in isAssociativeAndCommutative()
6049 case AArch64::MULv2i32: in isAssociativeAndCommutative()
6050 case AArch64::MULv4i32: in isAssociativeAndCommutative()
6051 case AArch64::ANDv8i8: in isAssociativeAndCommutative()
6052 case AArch64::ANDv16i8: in isAssociativeAndCommutative()
6053 case AArch64::ORRv8i8: in isAssociativeAndCommutative()
6054 case AArch64::ORRv16i8: in isAssociativeAndCommutative()
6055 case AArch64::EORv8i8: in isAssociativeAndCommutative()
6056 case AArch64::EORv16i8: in isAssociativeAndCommutative()
6058 case AArch64::ADD_ZZZ_B: in isAssociativeAndCommutative()
6059 case AArch64::ADD_ZZZ_H: in isAssociativeAndCommutative()
6060 case AArch64::ADD_ZZZ_S: in isAssociativeAndCommutative()
6061 case AArch64::ADD_ZZZ_D: in isAssociativeAndCommutative()
6062 case AArch64::MUL_ZZZ_B: in isAssociativeAndCommutative()
6063 case AArch64::MUL_ZZZ_H: in isAssociativeAndCommutative()
6064 case AArch64::MUL_ZZZ_S: in isAssociativeAndCommutative()
6065 case AArch64::MUL_ZZZ_D: in isAssociativeAndCommutative()
6066 case AArch64::AND_ZZZ: in isAssociativeAndCommutative()
6067 case AArch64::ORR_ZZZ: in isAssociativeAndCommutative()
6068 case AArch64::EOR_ZZZ: in isAssociativeAndCommutative()
6087 Root.findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, true); in getMaddPatterns()
6119 case AArch64::ADDWrr: in getMaddPatterns()
6122 setFound(AArch64::MADDWrrr, 1, AArch64::WZR, MCP::MULADDW_OP1); in getMaddPatterns()
6123 setFound(AArch64::MADDWrrr, 2, AArch64::WZR, MCP::MULADDW_OP2); in getMaddPatterns()
6125 case AArch64::ADDXrr: in getMaddPatterns()
6126 setFound(AArch64::MADDXrrr, 1, AArch64::XZR, MCP::MULADDX_OP1); in getMaddPatterns()
6127 setFound(AArch64::MADDXrrr, 2, AArch64::XZR, MCP::MULADDX_OP2); in getMaddPatterns()
6129 case AArch64::SUBWrr: in getMaddPatterns()
6130 setFound(AArch64::MADDWrrr, 2, AArch64::WZR, MCP::MULSUBW_OP2); in getMaddPatterns()
6131 setFound(AArch64::MADDWrrr, 1, AArch64::WZR, MCP::MULSUBW_OP1); in getMaddPatterns()
6133 case AArch64::SUBXrr: in getMaddPatterns()
6134 setFound(AArch64::MADDXrrr, 2, AArch64::XZR, MCP::MULSUBX_OP2); in getMaddPatterns()
6135 setFound(AArch64::MADDXrrr, 1, AArch64::XZR, MCP::MULSUBX_OP1); in getMaddPatterns()
6137 case AArch64::ADDWri: in getMaddPatterns()
6138 setFound(AArch64::MADDWrrr, 1, AArch64::WZR, MCP::MULADDWI_OP1); in getMaddPatterns()
6140 case AArch64::ADDXri: in getMaddPatterns()
6141 setFound(AArch64::MADDXrrr, 1, AArch64::XZR, MCP::MULADDXI_OP1); in getMaddPatterns()
6143 case AArch64::SUBWri: in getMaddPatterns()
6144 setFound(AArch64::MADDWrrr, 1, AArch64::WZR, MCP::MULSUBWI_OP1); in getMaddPatterns()
6146 case AArch64::SUBXri: in getMaddPatterns()
6147 setFound(AArch64::MADDXrrr, 1, AArch64::XZR, MCP::MULSUBXI_OP1); in getMaddPatterns()
6149 case AArch64::ADDv8i8: in getMaddPatterns()
6150 setVFound(AArch64::MULv8i8, 1, MCP::MULADDv8i8_OP1); in getMaddPatterns()
6151 setVFound(AArch64::MULv8i8, 2, MCP::MULADDv8i8_OP2); in getMaddPatterns()
6153 case AArch64::ADDv16i8: in getMaddPatterns()
6154 setVFound(AArch64::MULv16i8, 1, MCP::MULADDv16i8_OP1); in getMaddPatterns()
6155 setVFound(AArch64::MULv16i8, 2, MCP::MULADDv16i8_OP2); in getMaddPatterns()
6157 case AArch64::ADDv4i16: in getMaddPatterns()
6158 setVFound(AArch64::MULv4i16, 1, MCP::MULADDv4i16_OP1); in getMaddPatterns()
6159 setVFound(AArch64::MULv4i16, 2, MCP::MULADDv4i16_OP2); in getMaddPatterns()
6160 setVFound(AArch64::MULv4i16_indexed, 1, MCP::MULADDv4i16_indexed_OP1); in getMaddPatterns()
6161 setVFound(AArch64::MULv4i16_indexed, 2, MCP::MULADDv4i16_indexed_OP2); in getMaddPatterns()
6163 case AArch64::ADDv8i16: in getMaddPatterns()
6164 setVFound(AArch64::MULv8i16, 1, MCP::MULADDv8i16_OP1); in getMaddPatterns()
6165 setVFound(AArch64::MULv8i16, 2, MCP::MULADDv8i16_OP2); in getMaddPatterns()
6166 setVFound(AArch64::MULv8i16_indexed, 1, MCP::MULADDv8i16_indexed_OP1); in getMaddPatterns()
6167 setVFound(AArch64::MULv8i16_indexed, 2, MCP::MULADDv8i16_indexed_OP2); in getMaddPatterns()
6169 case AArch64::ADDv2i32: in getMaddPatterns()
6170 setVFound(AArch64::MULv2i32, 1, MCP::MULADDv2i32_OP1); in getMaddPatterns()
6171 setVFound(AArch64::MULv2i32, 2, MCP::MULADDv2i32_OP2); in getMaddPatterns()
6172 setVFound(AArch64::MULv2i32_indexed, 1, MCP::MULADDv2i32_indexed_OP1); in getMaddPatterns()
6173 setVFound(AArch64::MULv2i32_indexed, 2, MCP::MULADDv2i32_indexed_OP2); in getMaddPatterns()
6175 case AArch64::ADDv4i32: in getMaddPatterns()
6176 setVFound(AArch64::MULv4i32, 1, MCP::MULADDv4i32_OP1); in getMaddPatterns()
6177 setVFound(AArch64::MULv4i32, 2, MCP::MULADDv4i32_OP2); in getMaddPatterns()
6178 setVFound(AArch64::MULv4i32_indexed, 1, MCP::MULADDv4i32_indexed_OP1); in getMaddPatterns()
6179 setVFound(AArch64::MULv4i32_indexed, 2, MCP::MULADDv4i32_indexed_OP2); in getMaddPatterns()
6181 case AArch64::SUBv8i8: in getMaddPatterns()
6182 setVFound(AArch64::MULv8i8, 1, MCP::MULSUBv8i8_OP1); in getMaddPatterns()
6183 setVFound(AArch64::MULv8i8, 2, MCP::MULSUBv8i8_OP2); in getMaddPatterns()
6185 case AArch64::SUBv16i8: in getMaddPatterns()
6186 setVFound(AArch64::MULv16i8, 1, MCP::MULSUBv16i8_OP1); in getMaddPatterns()
6187 setVFound(AArch64::MULv16i8, 2, MCP::MULSUBv16i8_OP2); in getMaddPatterns()
6189 case AArch64::SUBv4i16: in getMaddPatterns()
6190 setVFound(AArch64::MULv4i16, 1, MCP::MULSUBv4i16_OP1); in getMaddPatterns()
6191 setVFound(AArch64::MULv4i16, 2, MCP::MULSUBv4i16_OP2); in getMaddPatterns()
6192 setVFound(AArch64::MULv4i16_indexed, 1, MCP::MULSUBv4i16_indexed_OP1); in getMaddPatterns()
6193 setVFound(AArch64::MULv4i16_indexed, 2, MCP::MULSUBv4i16_indexed_OP2); in getMaddPatterns()
6195 case AArch64::SUBv8i16: in getMaddPatterns()
6196 setVFound(AArch64::MULv8i16, 1, MCP::MULSUBv8i16_OP1); in getMaddPatterns()
6197 setVFound(AArch64::MULv8i16, 2, MCP::MULSUBv8i16_OP2); in getMaddPatterns()
6198 setVFound(AArch64::MULv8i16_indexed, 1, MCP::MULSUBv8i16_indexed_OP1); in getMaddPatterns()
6199 setVFound(AArch64::MULv8i16_indexed, 2, MCP::MULSUBv8i16_indexed_OP2); in getMaddPatterns()
6201 case AArch64::SUBv2i32: in getMaddPatterns()
6202 setVFound(AArch64::MULv2i32, 1, MCP::MULSUBv2i32_OP1); in getMaddPatterns()
6203 setVFound(AArch64::MULv2i32, 2, MCP::MULSUBv2i32_OP2); in getMaddPatterns()
6204 setVFound(AArch64::MULv2i32_indexed, 1, MCP::MULSUBv2i32_indexed_OP1); in getMaddPatterns()
6205 setVFound(AArch64::MULv2i32_indexed, 2, MCP::MULSUBv2i32_indexed_OP2); in getMaddPatterns()
6207 case AArch64::SUBv4i32: in getMaddPatterns()
6208 setVFound(AArch64::MULv4i32, 1, MCP::MULSUBv4i32_OP1); in getMaddPatterns()
6209 setVFound(AArch64::MULv4i32, 2, MCP::MULSUBv4i32_OP2); in getMaddPatterns()
6210 setVFound(AArch64::MULv4i32_indexed, 1, MCP::MULSUBv4i32_indexed_OP1); in getMaddPatterns()
6211 setVFound(AArch64::MULv4i32_indexed, 2, MCP::MULSUBv4i32_indexed_OP2); in getMaddPatterns()
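Editor's note: the getMaddPatterns cases listed above record, per integer add/sub opcode, which multiply operand can be folded into a multiply-accumulate (MADDWrrr/MADDXrrr). On AArch64 MIR a plain integer multiply is itself a MADD with WZR/XZR as the addend, which is what the WZR/XZR arguments in the setFound calls appear to be checking for. Below is a minimal standalone sketch of just the matching step over a toy instruction list; the Instr struct, opcode strings and single-use check are hypothetical stand-ins, not the MachineCombiner API.

#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

// Toy SSA-ish instruction: dst = op(srcs...).
struct Instr {
  std::string op;
  int dst;
  std::vector<int> srcs;
};

// Count how many operands across the program read register Reg.
static int countUses(const std::vector<Instr> &P, int Reg) {
  int N = 0;
  for (const Instr &I : P)
    N += static_cast<int>(std::count(I.srcs.begin(), I.srcs.end(), Reg));
  return N;
}

// If P[AddIdx] is an "add" with one operand defined by a single-use "mul",
// rewrite it as "madd" (dst = a * b + acc), mirroring the MULADD*_OP1/OP2
// patterns recorded above.
static bool fuseMulAdd(std::vector<Instr> &P, size_t AddIdx) {
  Instr &Add = P[AddIdx];
  if (Add.op != "add" || Add.srcs.size() != 2)
    return false;
  for (int OpNo = 0; OpNo < 2; ++OpNo) {
    int MulReg = Add.srcs[OpNo];
    int AccReg = Add.srcs[1 - OpNo];
    for (size_t I = 0; I < AddIdx; ++I) {
      const Instr &Def = P[I];
      if (Def.op == "mul" && Def.dst == MulReg && countUses(P, MulReg) == 1) {
        Add = {"madd", Add.dst, {Def.srcs[0], Def.srcs[1], AccReg}};
        return true; // a real pass would also erase the now-dead mul
      }
    }
  }
  return false;
}

int main() {
  std::vector<Instr> P = {{"mul", 3, {1, 2}}, {"add", 4, {3, 0}}};
  assert(fuseMulAdd(P, 1) && P[1].op == "madd");
}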
6242 case AArch64::FADDHrr: in getFMAPatterns()
6246 Found = Match(AArch64::FMULHrr, 1, MCP::FMULADDH_OP1); in getFMAPatterns()
6247 Found |= Match(AArch64::FMULHrr, 2, MCP::FMULADDH_OP2); in getFMAPatterns()
6249 case AArch64::FADDSrr: in getFMAPatterns()
6253 Found |= Match(AArch64::FMULSrr, 1, MCP::FMULADDS_OP1) || in getFMAPatterns()
6254 Match(AArch64::FMULv1i32_indexed, 1, MCP::FMLAv1i32_indexed_OP1); in getFMAPatterns()
6256 Found |= Match(AArch64::FMULSrr, 2, MCP::FMULADDS_OP2) || in getFMAPatterns()
6257 Match(AArch64::FMULv1i32_indexed, 2, MCP::FMLAv1i32_indexed_OP2); in getFMAPatterns()
6259 case AArch64::FADDDrr: in getFMAPatterns()
6260 Found |= Match(AArch64::FMULDrr, 1, MCP::FMULADDD_OP1) || in getFMAPatterns()
6261 Match(AArch64::FMULv1i64_indexed, 1, MCP::FMLAv1i64_indexed_OP1); in getFMAPatterns()
6263 Found |= Match(AArch64::FMULDrr, 2, MCP::FMULADDD_OP2) || in getFMAPatterns()
6264 Match(AArch64::FMULv1i64_indexed, 2, MCP::FMLAv1i64_indexed_OP2); in getFMAPatterns()
6266 case AArch64::FADDv4f16: in getFMAPatterns()
6267 Found |= Match(AArch64::FMULv4i16_indexed, 1, MCP::FMLAv4i16_indexed_OP1) || in getFMAPatterns()
6268 Match(AArch64::FMULv4f16, 1, MCP::FMLAv4f16_OP1); in getFMAPatterns()
6270 Found |= Match(AArch64::FMULv4i16_indexed, 2, MCP::FMLAv4i16_indexed_OP2) || in getFMAPatterns()
6271 Match(AArch64::FMULv4f16, 2, MCP::FMLAv4f16_OP2); in getFMAPatterns()
6273 case AArch64::FADDv8f16: in getFMAPatterns()
6274 Found |= Match(AArch64::FMULv8i16_indexed, 1, MCP::FMLAv8i16_indexed_OP1) || in getFMAPatterns()
6275 Match(AArch64::FMULv8f16, 1, MCP::FMLAv8f16_OP1); in getFMAPatterns()
6277 Found |= Match(AArch64::FMULv8i16_indexed, 2, MCP::FMLAv8i16_indexed_OP2) || in getFMAPatterns()
6278 Match(AArch64::FMULv8f16, 2, MCP::FMLAv8f16_OP2); in getFMAPatterns()
6280 case AArch64::FADDv2f32: in getFMAPatterns()
6281 Found |= Match(AArch64::FMULv2i32_indexed, 1, MCP::FMLAv2i32_indexed_OP1) || in getFMAPatterns()
6282 Match(AArch64::FMULv2f32, 1, MCP::FMLAv2f32_OP1); in getFMAPatterns()
6284 Found |= Match(AArch64::FMULv2i32_indexed, 2, MCP::FMLAv2i32_indexed_OP2) || in getFMAPatterns()
6285 Match(AArch64::FMULv2f32, 2, MCP::FMLAv2f32_OP2); in getFMAPatterns()
6287 case AArch64::FADDv2f64: in getFMAPatterns()
6288 Found |= Match(AArch64::FMULv2i64_indexed, 1, MCP::FMLAv2i64_indexed_OP1) || in getFMAPatterns()
6289 Match(AArch64::FMULv2f64, 1, MCP::FMLAv2f64_OP1); in getFMAPatterns()
6291 Found |= Match(AArch64::FMULv2i64_indexed, 2, MCP::FMLAv2i64_indexed_OP2) || in getFMAPatterns()
6292 Match(AArch64::FMULv2f64, 2, MCP::FMLAv2f64_OP2); in getFMAPatterns()
6294 case AArch64::FADDv4f32: in getFMAPatterns()
6295 Found |= Match(AArch64::FMULv4i32_indexed, 1, MCP::FMLAv4i32_indexed_OP1) || in getFMAPatterns()
6296 Match(AArch64::FMULv4f32, 1, MCP::FMLAv4f32_OP1); in getFMAPatterns()
6298 Found |= Match(AArch64::FMULv4i32_indexed, 2, MCP::FMLAv4i32_indexed_OP2) || in getFMAPatterns()
6299 Match(AArch64::FMULv4f32, 2, MCP::FMLAv4f32_OP2); in getFMAPatterns()
6301 case AArch64::FSUBHrr: in getFMAPatterns()
6302 Found = Match(AArch64::FMULHrr, 1, MCP::FMULSUBH_OP1); in getFMAPatterns()
6303 Found |= Match(AArch64::FMULHrr, 2, MCP::FMULSUBH_OP2); in getFMAPatterns()
6304 Found |= Match(AArch64::FNMULHrr, 1, MCP::FNMULSUBH_OP1); in getFMAPatterns()
6306 case AArch64::FSUBSrr: in getFMAPatterns()
6307 Found = Match(AArch64::FMULSrr, 1, MCP::FMULSUBS_OP1); in getFMAPatterns()
6309 Found |= Match(AArch64::FMULSrr, 2, MCP::FMULSUBS_OP2) || in getFMAPatterns()
6310 Match(AArch64::FMULv1i32_indexed, 2, MCP::FMLSv1i32_indexed_OP2); in getFMAPatterns()
6312 Found |= Match(AArch64::FNMULSrr, 1, MCP::FNMULSUBS_OP1); in getFMAPatterns()
6314 case AArch64::FSUBDrr: in getFMAPatterns()
6315 Found = Match(AArch64::FMULDrr, 1, MCP::FMULSUBD_OP1); in getFMAPatterns()
6317 Found |= Match(AArch64::FMULDrr, 2, MCP::FMULSUBD_OP2) || in getFMAPatterns()
6318 Match(AArch64::FMULv1i64_indexed, 2, MCP::FMLSv1i64_indexed_OP2); in getFMAPatterns()
6320 Found |= Match(AArch64::FNMULDrr, 1, MCP::FNMULSUBD_OP1); in getFMAPatterns()
6322 case AArch64::FSUBv4f16: in getFMAPatterns()
6323 Found |= Match(AArch64::FMULv4i16_indexed, 2, MCP::FMLSv4i16_indexed_OP2) || in getFMAPatterns()
6324 Match(AArch64::FMULv4f16, 2, MCP::FMLSv4f16_OP2); in getFMAPatterns()
6326 Found |= Match(AArch64::FMULv4i16_indexed, 1, MCP::FMLSv4i16_indexed_OP1) || in getFMAPatterns()
6327 Match(AArch64::FMULv4f16, 1, MCP::FMLSv4f16_OP1); in getFMAPatterns()
6329 case AArch64::FSUBv8f16: in getFMAPatterns()
6330 Found |= Match(AArch64::FMULv8i16_indexed, 2, MCP::FMLSv8i16_indexed_OP2) || in getFMAPatterns()
6331 Match(AArch64::FMULv8f16, 2, MCP::FMLSv8f16_OP2); in getFMAPatterns()
6333 Found |= Match(AArch64::FMULv8i16_indexed, 1, MCP::FMLSv8i16_indexed_OP1) || in getFMAPatterns()
6334 Match(AArch64::FMULv8f16, 1, MCP::FMLSv8f16_OP1); in getFMAPatterns()
6336 case AArch64::FSUBv2f32: in getFMAPatterns()
6337 Found |= Match(AArch64::FMULv2i32_indexed, 2, MCP::FMLSv2i32_indexed_OP2) || in getFMAPatterns()
6338 Match(AArch64::FMULv2f32, 2, MCP::FMLSv2f32_OP2); in getFMAPatterns()
6340 Found |= Match(AArch64::FMULv2i32_indexed, 1, MCP::FMLSv2i32_indexed_OP1) || in getFMAPatterns()
6341 Match(AArch64::FMULv2f32, 1, MCP::FMLSv2f32_OP1); in getFMAPatterns()
6343 case AArch64::FSUBv2f64: in getFMAPatterns()
6344 Found |= Match(AArch64::FMULv2i64_indexed, 2, MCP::FMLSv2i64_indexed_OP2) || in getFMAPatterns()
6345 Match(AArch64::FMULv2f64, 2, MCP::FMLSv2f64_OP2); in getFMAPatterns()
6347 Found |= Match(AArch64::FMULv2i64_indexed, 1, MCP::FMLSv2i64_indexed_OP1) || in getFMAPatterns()
6348 Match(AArch64::FMULv2f64, 1, MCP::FMLSv2f64_OP1); in getFMAPatterns()
6350 case AArch64::FSUBv4f32: in getFMAPatterns()
6351 Found |= Match(AArch64::FMULv4i32_indexed, 2, MCP::FMLSv4i32_indexed_OP2) || in getFMAPatterns()
6352 Match(AArch64::FMULv4f32, 2, MCP::FMLSv4f32_OP2); in getFMAPatterns()
6354 Found |= Match(AArch64::FMULv4i32_indexed, 1, MCP::FMLSv4i32_indexed_OP1) || in getFMAPatterns()
6355 Match(AArch64::FMULv4f32, 1, MCP::FMLSv4f32_OP1); in getFMAPatterns()
6388 case AArch64::FMULv2f32: in getFMULPatterns()
6389 Found = Match(AArch64::DUPv2i32lane, 1, MCP::FMULv2i32_indexed_OP1); in getFMULPatterns()
6390 Found |= Match(AArch64::DUPv2i32lane, 2, MCP::FMULv2i32_indexed_OP2); in getFMULPatterns()
6392 case AArch64::FMULv2f64: in getFMULPatterns()
6393 Found = Match(AArch64::DUPv2i64lane, 1, MCP::FMULv2i64_indexed_OP1); in getFMULPatterns()
6394 Found |= Match(AArch64::DUPv2i64lane, 2, MCP::FMULv2i64_indexed_OP2); in getFMULPatterns()
6396 case AArch64::FMULv4f16: in getFMULPatterns()
6397 Found = Match(AArch64::DUPv4i16lane, 1, MCP::FMULv4i16_indexed_OP1); in getFMULPatterns()
6398 Found |= Match(AArch64::DUPv4i16lane, 2, MCP::FMULv4i16_indexed_OP2); in getFMULPatterns()
6400 case AArch64::FMULv4f32: in getFMULPatterns()
6401 Found = Match(AArch64::DUPv4i32lane, 1, MCP::FMULv4i32_indexed_OP1); in getFMULPatterns()
6402 Found |= Match(AArch64::DUPv4i32lane, 2, MCP::FMULv4i32_indexed_OP2); in getFMULPatterns()
6404 case AArch64::FMULv8f16: in getFMULPatterns()
6405 Found = Match(AArch64::DUPv8i16lane, 1, MCP::FMULv8i16_indexed_OP1); in getFMULPatterns()
6406 Found |= Match(AArch64::DUPv8i16lane, 2, MCP::FMULv8i16_indexed_OP2); in getFMULPatterns()
6437 case AArch64::FNEGDr: in getFNEGPatterns()
6438 return Match(AArch64::FMADDDrrr, AArch64MachineCombinerPattern::FNMADD); in getFNEGPatterns()
6439 case AArch64::FNEGSr: in getFNEGPatterns()
6440 return Match(AArch64::FMADDSrrr, AArch64MachineCombinerPattern::FNMADD); in getFNEGPatterns()
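Editor's note: getFMAPatterns, getFMULPatterns and getFNEGPatterns play the same game for floating point: an FADD/FSUB (or an FNEG of a multiply-add) fed by an FMUL becomes a candidate for a fused FMLA/FMLS/FNMADD/FNMSUB. The snippet below is only a numeric sanity check of the identities those pattern names encode, using exactly representable values so that fusing cannot change the result; in general a fused multiply-add rounds once instead of twice, which is why the real combine is only performed when the instruction's FP flags allow it.

#include <cassert>
#include <cmath>

// Identities behind the FMLA/FMLS/FNMADD/FNMSUB patterns, checked with values
// where the intermediate product is exact, so fused and unfused agree.
int main() {
  double a = 1.0, n = 2.0, m = 3.0;
  assert(std::fma(n, m, a) == a + n * m);      // FMADD / FMLA
  assert(std::fma(-n, m, a) == a - n * m);     // FMSUB / FMLS
  assert(std::fma(-n, m, -a) == -(a + n * m)); // FNMADD
  assert(std::fma(n, m, -a) == n * m - a);     // FNMSUB
}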
6571 case AArch64::SUBWrr: in getMiscPatterns()
6572 case AArch64::SUBSWrr: in getMiscPatterns()
6573 case AArch64::SUBXrr: in getMiscPatterns()
6574 case AArch64::SUBSXrr: in getMiscPatterns()
6582 Root.findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, true) == in getMiscPatterns()
6586 if (canCombine(MBB, Root.getOperand(2), AArch64::ADDWrr) || in getMiscPatterns()
6587 canCombine(MBB, Root.getOperand(2), AArch64::ADDSWrr) || in getMiscPatterns()
6588 canCombine(MBB, Root.getOperand(2), AArch64::ADDXrr) || in getMiscPatterns()
6589 canCombine(MBB, Root.getOperand(2), AArch64::ADDSXrr)) { in getMiscPatterns()
6725 if (AArch64::FPR32RegClass.hasSubClassEq(RC)) in genFNegatedMAD()
6726 Opc = AArch64::FNMADDSrrr; in genFNegatedMAD()
6727 else if (AArch64::FPR64RegClass.hasSubClassEq(RC)) in genFNegatedMAD()
6728 Opc = AArch64::FNMADDDrrr; in genFNegatedMAD()
6947 if (Opcode == AArch64::SUBSWrr) in genSubAdd2SubSub()
6948 Opcode = AArch64::SUBWrr; in genSubAdd2SubSub()
6949 else if (Opcode == AArch64::SUBSXrr) in genSubAdd2SubSub()
6950 Opcode = AArch64::SUBXrr; in genSubAdd2SubSub()
6952 assert((Opcode == AArch64::SUBWrr || Opcode == AArch64::SUBXrr) && in genSubAdd2SubSub()
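Editor's note: getMiscPatterns above looks for a SUB/SUBS whose second operand comes from an ADD, and genSubAdd2SubSub rewrites it as two subtractions, re-associating a - (b + c) into (a - b) - c. The rewrite rests on an exact wrap-around integer identity; the standalone check below is illustrative only and is not taken from the pass.

#include <cassert>
#include <cstdint>

// genSubAdd2SubSub relies on the wrap-around identity
//   a - (b + c) == (a - b) - c
// which holds for unsigned / two's-complement arithmetic of any width.
int main() {
  uint64_t a = 5, b = 0xffffffffffffff00ULL, c = 42; // overflow is fine
  assert(a - (b + c) == (a - b) - c);
}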
7018 Opc = AArch64::MADDWrrr; in genAlternativeCodeSequence()
7019 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7021 Opc = AArch64::MADDXrrr; in genAlternativeCodeSequence()
7022 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7033 Opc = AArch64::MADDWrrr; in genAlternativeCodeSequence()
7034 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7036 Opc = AArch64::MADDXrrr; in genAlternativeCodeSequence()
7037 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7051 OrrOpc = AArch64::ORRWri; in genAlternativeCodeSequence()
7052 OrrRC = &AArch64::GPR32spRegClass; in genAlternativeCodeSequence()
7054 ZeroReg = AArch64::WZR; in genAlternativeCodeSequence()
7055 Opc = AArch64::MADDWrrr; in genAlternativeCodeSequence()
7056 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7058 OrrOpc = AArch64::ORRXri; in genAlternativeCodeSequence()
7059 OrrRC = &AArch64::GPR64spRegClass; in genAlternativeCodeSequence()
7061 ZeroReg = AArch64::XZR; in genAlternativeCodeSequence()
7062 Opc = AArch64::MADDXrrr; in genAlternativeCodeSequence()
7063 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7087 assert((MovI->Opcode == AArch64::MOVNWi || in genAlternativeCodeSequence()
7088 MovI->Opcode == AArch64::MOVZWi) && in genAlternativeCodeSequence()
7091 assert((MovI->Opcode == AArch64::MOVNXi || in genAlternativeCodeSequence()
7092 MovI->Opcode == AArch64::MOVZXi) && in genAlternativeCodeSequence()
7113 SubOpc = AArch64::SUBWrr; in genAlternativeCodeSequence()
7114 SubRC = &AArch64::GPR32spRegClass; in genAlternativeCodeSequence()
7115 ZeroReg = AArch64::WZR; in genAlternativeCodeSequence()
7116 Opc = AArch64::MADDWrrr; in genAlternativeCodeSequence()
7117 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7119 SubOpc = AArch64::SUBXrr; in genAlternativeCodeSequence()
7120 SubRC = &AArch64::GPR64spRegClass; in genAlternativeCodeSequence()
7121 ZeroReg = AArch64::XZR; in genAlternativeCodeSequence()
7122 Opc = AArch64::MADDXrrr; in genAlternativeCodeSequence()
7123 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7143 Opc = AArch64::MSUBWrrr; in genAlternativeCodeSequence()
7144 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7146 Opc = AArch64::MSUBXrrr; in genAlternativeCodeSequence()
7147 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7161 OrrOpc = AArch64::ORRWri; in genAlternativeCodeSequence()
7162 OrrRC = &AArch64::GPR32spRegClass; in genAlternativeCodeSequence()
7164 ZeroReg = AArch64::WZR; in genAlternativeCodeSequence()
7165 Opc = AArch64::MADDWrrr; in genAlternativeCodeSequence()
7166 RC = &AArch64::GPR32RegClass; in genAlternativeCodeSequence()
7168 OrrOpc = AArch64::ORRXri; in genAlternativeCodeSequence()
7169 OrrRC = &AArch64::GPR64spRegClass; in genAlternativeCodeSequence()
7171 ZeroReg = AArch64::XZR; in genAlternativeCodeSequence()
7172 Opc = AArch64::MADDXrrr; in genAlternativeCodeSequence()
7173 RC = &AArch64::GPR64RegClass; in genAlternativeCodeSequence()
7196 assert((MovI->Opcode == AArch64::MOVNWi || in genAlternativeCodeSequence()
7197 MovI->Opcode == AArch64::MOVZWi) && in genAlternativeCodeSequence()
7200 assert((MovI->Opcode == AArch64::MOVNXi || in genAlternativeCodeSequence()
7201 MovI->Opcode == AArch64::MOVZXi) && in genAlternativeCodeSequence()
7214 Opc = AArch64::MLAv8i8; in genAlternativeCodeSequence()
7215 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7219 Opc = AArch64::MLAv8i8; in genAlternativeCodeSequence()
7220 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7224 Opc = AArch64::MLAv16i8; in genAlternativeCodeSequence()
7225 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7229 Opc = AArch64::MLAv16i8; in genAlternativeCodeSequence()
7230 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7234 Opc = AArch64::MLAv4i16; in genAlternativeCodeSequence()
7235 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7239 Opc = AArch64::MLAv4i16; in genAlternativeCodeSequence()
7240 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7244 Opc = AArch64::MLAv8i16; in genAlternativeCodeSequence()
7245 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7249 Opc = AArch64::MLAv8i16; in genAlternativeCodeSequence()
7250 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7254 Opc = AArch64::MLAv2i32; in genAlternativeCodeSequence()
7255 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7259 Opc = AArch64::MLAv2i32; in genAlternativeCodeSequence()
7260 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7264 Opc = AArch64::MLAv4i32; in genAlternativeCodeSequence()
7265 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7269 Opc = AArch64::MLAv4i32; in genAlternativeCodeSequence()
7270 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7275 Opc = AArch64::MLAv8i8; in genAlternativeCodeSequence()
7276 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7278 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv8i8, in genAlternativeCodeSequence()
7282 Opc = AArch64::MLSv8i8; in genAlternativeCodeSequence()
7283 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7287 Opc = AArch64::MLAv16i8; in genAlternativeCodeSequence()
7288 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7290 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv16i8, in genAlternativeCodeSequence()
7294 Opc = AArch64::MLSv16i8; in genAlternativeCodeSequence()
7295 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7299 Opc = AArch64::MLAv4i16; in genAlternativeCodeSequence()
7300 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7302 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv4i16, in genAlternativeCodeSequence()
7306 Opc = AArch64::MLSv4i16; in genAlternativeCodeSequence()
7307 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7311 Opc = AArch64::MLAv8i16; in genAlternativeCodeSequence()
7312 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7314 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv8i16, in genAlternativeCodeSequence()
7318 Opc = AArch64::MLSv8i16; in genAlternativeCodeSequence()
7319 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7323 Opc = AArch64::MLAv2i32; in genAlternativeCodeSequence()
7324 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7326 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv2i32, in genAlternativeCodeSequence()
7330 Opc = AArch64::MLSv2i32; in genAlternativeCodeSequence()
7331 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7335 Opc = AArch64::MLAv4i32; in genAlternativeCodeSequence()
7336 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7338 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv4i32, in genAlternativeCodeSequence()
7342 Opc = AArch64::MLSv4i32; in genAlternativeCodeSequence()
7343 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7348 Opc = AArch64::MLAv4i16_indexed; in genAlternativeCodeSequence()
7349 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7353 Opc = AArch64::MLAv4i16_indexed; in genAlternativeCodeSequence()
7354 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7358 Opc = AArch64::MLAv8i16_indexed; in genAlternativeCodeSequence()
7359 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7363 Opc = AArch64::MLAv8i16_indexed; in genAlternativeCodeSequence()
7364 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7368 Opc = AArch64::MLAv2i32_indexed; in genAlternativeCodeSequence()
7369 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7373 Opc = AArch64::MLAv2i32_indexed; in genAlternativeCodeSequence()
7374 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7378 Opc = AArch64::MLAv4i32_indexed; in genAlternativeCodeSequence()
7379 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7383 Opc = AArch64::MLAv4i32_indexed; in genAlternativeCodeSequence()
7384 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7389 Opc = AArch64::MLAv4i16_indexed; in genAlternativeCodeSequence()
7390 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7392 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv4i16, in genAlternativeCodeSequence()
7396 Opc = AArch64::MLSv4i16_indexed; in genAlternativeCodeSequence()
7397 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7401 Opc = AArch64::MLAv8i16_indexed; in genAlternativeCodeSequence()
7402 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7404 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv8i16, in genAlternativeCodeSequence()
7408 Opc = AArch64::MLSv8i16_indexed; in genAlternativeCodeSequence()
7409 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7413 Opc = AArch64::MLAv2i32_indexed; in genAlternativeCodeSequence()
7414 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7416 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv2i32, in genAlternativeCodeSequence()
7420 Opc = AArch64::MLSv2i32_indexed; in genAlternativeCodeSequence()
7421 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7425 Opc = AArch64::MLAv4i32_indexed; in genAlternativeCodeSequence()
7426 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7428 InstrIdxForVirtReg, 1, Opc, AArch64::NEGv4i32, in genAlternativeCodeSequence()
7432 Opc = AArch64::MLSv4i32_indexed; in genAlternativeCodeSequence()
7433 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7439 Opc = AArch64::FMADDHrrr; in genAlternativeCodeSequence()
7440 RC = &AArch64::FPR16RegClass; in genAlternativeCodeSequence()
7444 Opc = AArch64::FMADDSrrr; in genAlternativeCodeSequence()
7445 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7449 Opc = AArch64::FMADDDrrr; in genAlternativeCodeSequence()
7450 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7455 Opc = AArch64::FMADDHrrr; in genAlternativeCodeSequence()
7456 RC = &AArch64::FPR16RegClass; in genAlternativeCodeSequence()
7460 Opc = AArch64::FMADDSrrr; in genAlternativeCodeSequence()
7461 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7465 Opc = AArch64::FMADDDrrr; in genAlternativeCodeSequence()
7466 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7471 Opc = AArch64::FMLAv1i32_indexed; in genAlternativeCodeSequence()
7472 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7477 Opc = AArch64::FMLAv1i32_indexed; in genAlternativeCodeSequence()
7478 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7484 Opc = AArch64::FMLAv1i64_indexed; in genAlternativeCodeSequence()
7485 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7490 Opc = AArch64::FMLAv1i64_indexed; in genAlternativeCodeSequence()
7491 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7497 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7498 Opc = AArch64::FMLAv4i16_indexed; in genAlternativeCodeSequence()
7503 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7504 Opc = AArch64::FMLAv4f16; in genAlternativeCodeSequence()
7509 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7510 Opc = AArch64::FMLAv4i16_indexed; in genAlternativeCodeSequence()
7515 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7516 Opc = AArch64::FMLAv4f16; in genAlternativeCodeSequence()
7523 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7525 Opc = AArch64::FMLAv2i32_indexed; in genAlternativeCodeSequence()
7529 Opc = AArch64::FMLAv2f32; in genAlternativeCodeSequence()
7536 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7538 Opc = AArch64::FMLAv2i32_indexed; in genAlternativeCodeSequence()
7542 Opc = AArch64::FMLAv2f32; in genAlternativeCodeSequence()
7549 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7550 Opc = AArch64::FMLAv8i16_indexed; in genAlternativeCodeSequence()
7555 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7556 Opc = AArch64::FMLAv8f16; in genAlternativeCodeSequence()
7561 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7562 Opc = AArch64::FMLAv8i16_indexed; in genAlternativeCodeSequence()
7567 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7568 Opc = AArch64::FMLAv8f16; in genAlternativeCodeSequence()
7575 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7577 Opc = AArch64::FMLAv2i64_indexed; in genAlternativeCodeSequence()
7581 Opc = AArch64::FMLAv2f64; in genAlternativeCodeSequence()
7588 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7590 Opc = AArch64::FMLAv2i64_indexed; in genAlternativeCodeSequence()
7594 Opc = AArch64::FMLAv2f64; in genAlternativeCodeSequence()
7602 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7604 Opc = AArch64::FMLAv4i32_indexed; in genAlternativeCodeSequence()
7608 Opc = AArch64::FMLAv4f32; in genAlternativeCodeSequence()
7616 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7618 Opc = AArch64::FMLAv4i32_indexed; in genAlternativeCodeSequence()
7622 Opc = AArch64::FMLAv4f32; in genAlternativeCodeSequence()
7629 Opc = AArch64::FNMSUBHrrr; in genAlternativeCodeSequence()
7630 RC = &AArch64::FPR16RegClass; in genAlternativeCodeSequence()
7634 Opc = AArch64::FNMSUBSrrr; in genAlternativeCodeSequence()
7635 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7639 Opc = AArch64::FNMSUBDrrr; in genAlternativeCodeSequence()
7640 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7645 Opc = AArch64::FNMADDHrrr; in genAlternativeCodeSequence()
7646 RC = &AArch64::FPR16RegClass; in genAlternativeCodeSequence()
7650 Opc = AArch64::FNMADDSrrr; in genAlternativeCodeSequence()
7651 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7655 Opc = AArch64::FNMADDDrrr; in genAlternativeCodeSequence()
7656 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7661 Opc = AArch64::FMSUBHrrr; in genAlternativeCodeSequence()
7662 RC = &AArch64::FPR16RegClass; in genAlternativeCodeSequence()
7666 Opc = AArch64::FMSUBSrrr; in genAlternativeCodeSequence()
7667 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7671 Opc = AArch64::FMSUBDrrr; in genAlternativeCodeSequence()
7672 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7677 Opc = AArch64::FMLSv1i32_indexed; in genAlternativeCodeSequence()
7678 RC = &AArch64::FPR32RegClass; in genAlternativeCodeSequence()
7684 Opc = AArch64::FMLSv1i64_indexed; in genAlternativeCodeSequence()
7685 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7692 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7695 BuildMI(MF, MIMetadata(Root), TII->get(AArch64::FNEGv4f16), NewVR) in genAlternativeCodeSequence()
7700 Opc = AArch64::FMLAv4f16; in genAlternativeCodeSequence()
7704 Opc = AArch64::FMLAv4i16_indexed; in genAlternativeCodeSequence()
7711 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7712 Opc = AArch64::FMLSv4f16; in genAlternativeCodeSequence()
7717 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7718 Opc = AArch64::FMLSv4i16_indexed; in genAlternativeCodeSequence()
7725 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7727 Opc = AArch64::FMLSv2i32_indexed; in genAlternativeCodeSequence()
7731 Opc = AArch64::FMLSv2f32; in genAlternativeCodeSequence()
7739 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7742 BuildMI(MF, MIMetadata(Root), TII->get(AArch64::FNEGv8f16), NewVR) in genAlternativeCodeSequence()
7747 Opc = AArch64::FMLAv8f16; in genAlternativeCodeSequence()
7751 Opc = AArch64::FMLAv8i16_indexed; in genAlternativeCodeSequence()
7758 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7759 Opc = AArch64::FMLSv8f16; in genAlternativeCodeSequence()
7764 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7765 Opc = AArch64::FMLSv8i16_indexed; in genAlternativeCodeSequence()
7772 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7774 Opc = AArch64::FMLSv2i64_indexed; in genAlternativeCodeSequence()
7778 Opc = AArch64::FMLSv2f64; in genAlternativeCodeSequence()
7786 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7788 Opc = AArch64::FMLSv4i32_indexed; in genAlternativeCodeSequence()
7792 Opc = AArch64::FMLSv4f32; in genAlternativeCodeSequence()
7799 RC = &AArch64::FPR64RegClass; in genAlternativeCodeSequence()
7802 BuildMI(MF, MIMetadata(Root), TII->get(AArch64::FNEGv2f32), NewVR) in genAlternativeCodeSequence()
7807 Opc = AArch64::FMLAv2i32_indexed; in genAlternativeCodeSequence()
7811 Opc = AArch64::FMLAv2f32; in genAlternativeCodeSequence()
7819 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7822 BuildMI(MF, MIMetadata(Root), TII->get(AArch64::FNEGv4f32), NewVR) in genAlternativeCodeSequence()
7827 Opc = AArch64::FMLAv4i32_indexed; in genAlternativeCodeSequence()
7831 Opc = AArch64::FMLAv4f32; in genAlternativeCodeSequence()
7839 RC = &AArch64::FPR128RegClass; in genAlternativeCodeSequence()
7842 BuildMI(MF, MIMetadata(Root), TII->get(AArch64::FNEGv2f64), NewVR) in genAlternativeCodeSequence()
7847 Opc = AArch64::FMLAv2i64_indexed; in genAlternativeCodeSequence()
7851 Opc = AArch64::FMLAv2f64; in genAlternativeCodeSequence()
7862 genIndexedMultiply(Root, InsInstrs, IdxDupOp, AArch64::FMULv2i32_indexed, in genAlternativeCodeSequence()
7863 &AArch64::FPR128RegClass, MRI); in genAlternativeCodeSequence()
7871 genIndexedMultiply(Root, InsInstrs, IdxDupOp, AArch64::FMULv2i64_indexed, in genAlternativeCodeSequence()
7872 &AArch64::FPR128RegClass, MRI); in genAlternativeCodeSequence()
7880 genIndexedMultiply(Root, InsInstrs, IdxDupOp, AArch64::FMULv4i16_indexed, in genAlternativeCodeSequence()
7881 &AArch64::FPR128_loRegClass, MRI); in genAlternativeCodeSequence()
7889 genIndexedMultiply(Root, InsInstrs, IdxDupOp, AArch64::FMULv4i32_indexed, in genAlternativeCodeSequence()
7890 &AArch64::FPR128RegClass, MRI); in genAlternativeCodeSequence()
7898 genIndexedMultiply(Root, InsInstrs, IdxDupOp, AArch64::FMULv8i16_indexed, in genAlternativeCodeSequence()
7899 &AArch64::FPR128_loRegClass, MRI); in genAlternativeCodeSequence()
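Editor's note: in the vector MULSUB cases of genAlternativeCodeSequence, the _OP1 variants (multiply feeding the first SUB operand) appear to be lowered as a NEG of the other operand followed by MLA, while the _OP2 variants use MLS directly. The identity behind the NEG+MLA form holds exactly on each wrapping integer lane; the 8-bit check below is a standalone illustration, not code from the pass.

#include <cassert>
#include <cstdint>

// When the multiply feeds the first SUB operand, (b*c) - a, NEG + MLA uses the
// exact lane-wise identity (b*c) - a == (-a) + b*c.  When it feeds the second
// operand, a - (b*c), MLS computes it directly.  Values chosen so the 8-bit
// lane wraps and the identity is still exact.
int main() {
  uint8_t a = 7, b = 200, c = 13;
  uint8_t NegPlusMla = static_cast<uint8_t>(static_cast<uint8_t>(-a) + b * c);
  uint8_t MulMinusA = static_cast<uint8_t>(b * c - a);
  assert(NegPlusMla == MulMinusA);
}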
7966 case AArch64::Bcc: in optimizeCondBranch()
7968 case AArch64::CBZW: in optimizeCondBranch()
7969 case AArch64::CBZX: in optimizeCondBranch()
7972 case AArch64::CBNZW: in optimizeCondBranch()
7973 case AArch64::CBNZX: in optimizeCondBranch()
7977 case AArch64::TBZW: in optimizeCondBranch()
7978 case AArch64::TBZX: in optimizeCondBranch()
7982 case AArch64::TBNZW: in optimizeCondBranch()
7983 case AArch64::TBNZX: in optimizeCondBranch()
8020 case AArch64::ANDWri: in optimizeCondBranch()
8021 case AArch64::ANDXri: { in optimizeCondBranch()
8029 bool Is32Bit = (DefMI->getOpcode() == AArch64::ANDWri); in optimizeCondBranch()
8047 ? (IsNegativeBranch ? AArch64::TBNZW : AArch64::TBZW) in optimizeCondBranch()
8048 : (IsNegativeBranch ? AArch64::TBNZX : AArch64::TBZX); in optimizeCondBranch()
8062 NewMI->getOperand(0).setSubReg(AArch64::sub_32); in optimizeCondBranch()
8067 case AArch64::CSINCWr: in optimizeCondBranch()
8068 case AArch64::CSINCXr: { in optimizeCondBranch()
8069 if (!(DefMI->getOperand(1).getReg() == AArch64::WZR && in optimizeCondBranch()
8070 DefMI->getOperand(2).getReg() == AArch64::WZR) && in optimizeCondBranch()
8071 !(DefMI->getOperand(1).getReg() == AArch64::XZR && in optimizeCondBranch()
8072 DefMI->getOperand(2).getReg() == AArch64::XZR)) in optimizeCondBranch()
8075 if (DefMI->findRegisterDefOperandIdx(AArch64::NZCV, /*TRI=*/nullptr, in optimizeCondBranch()
8090 BuildMI(RefToMBB, MI, DL, get(AArch64::Bcc)).addImm(CC).addMBB(TBB); in optimizeCondBranch()
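Editor's note: optimizeCondBranch above rewrites a cbz/cbnz of an AND-immediate result into a tbz/tbnz on the tested bit when the mask selects a single bit, and in the CSINC case folds a materialised condition back into a plain b.cc. The sketch below (C++20 <bit>) shows only the mask test and bit-index computation; immediate decoding, subregister handling and the negative-branch flip are the real work and are not modelled here.

#include <bit>
#include <cassert>
#include <cstdint>
#include <optional>

// If a cbz/cbnz tests a value produced by AND with a single-bit mask, the same
// branch can be done with tbz/tbnz on that bit.
static std::optional<unsigned> tbzBitForMask(uint64_t Mask) {
  if (!std::has_single_bit(Mask))
    return std::nullopt;
  return std::countr_zero(Mask); // bit index to feed TBZ/TBNZ
}

int main() {
  assert(tbzBitForMask(0x8).value() == 3); // and w0, w1, #8 ; cbnz -> tbnz #3
  assert(!tbzBitForMask(0x6).has_value()); // multi-bit mask: keep cbz/cbnz
}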
8108 {MO_PAGE, "aarch64-page"}, {MO_PAGEOFF, "aarch64-pageoff"}, in getSerializableDirectMachineOperandTargetFlags()
8109 {MO_G3, "aarch64-g3"}, {MO_G2, "aarch64-g2"}, in getSerializableDirectMachineOperandTargetFlags()
8110 {MO_G1, "aarch64-g1"}, {MO_G0, "aarch64-g0"}, in getSerializableDirectMachineOperandTargetFlags()
8111 {MO_HI12, "aarch64-hi12"}}; in getSerializableDirectMachineOperandTargetFlags()
8120 {MO_COFFSTUB, "aarch64-coffstub"}, in getSerializableBitmaskMachineOperandTargetFlags()
8121 {MO_GOT, "aarch64-got"}, in getSerializableBitmaskMachineOperandTargetFlags()
8122 {MO_NC, "aarch64-nc"}, in getSerializableBitmaskMachineOperandTargetFlags()
8123 {MO_S, "aarch64-s"}, in getSerializableBitmaskMachineOperandTargetFlags()
8124 {MO_TLS, "aarch64-tls"}, in getSerializableBitmaskMachineOperandTargetFlags()
8125 {MO_DLLIMPORT, "aarch64-dllimport"}, in getSerializableBitmaskMachineOperandTargetFlags()
8126 {MO_PREL, "aarch64-prel"}, in getSerializableBitmaskMachineOperandTargetFlags()
8127 {MO_TAGGED, "aarch64-tagged"}, in getSerializableBitmaskMachineOperandTargetFlags()
8128 {MO_ARM64EC_CALLMANGLE, "aarch64-arm64ec-callmangle"}, in getSerializableBitmaskMachineOperandTargetFlags()
8136 {{MOSuppressPair, "aarch64-suppress-pair"}, in getSerializableMachineMemOperandTargetFlags()
8137 {MOStridedAccess, "aarch64-strided-access"}}; in getSerializableMachineMemOperandTargetFlags()
8243 for (unsigned Reg : AArch64::GPR64RegClass) { in findRegisterToSaveLRTo()
8245 Reg != AArch64::LR && // LR is not reserved, but don't use it. in findRegisterToSaveLRTo()
8246 Reg != AArch64::X16 && // X16 is not guaranteed to be preserved. in findRegisterToSaveLRTo()
8247 Reg != AArch64::X17 && // Ditto for X17. in findRegisterToSaveLRTo()
8354 if (MI.modifiesRegister(AArch64::SP, &TRI)) { in getOutliningCandidateInfo()
8356 case AArch64::ADDXri: in getOutliningCandidateInfo()
8357 case AArch64::ADDWri: in getOutliningCandidateInfo()
8366 if (MI.getOperand(1).getReg() == AArch64::SP) in getOutliningCandidateInfo()
8371 case AArch64::SUBXri: in getOutliningCandidateInfo()
8372 case AArch64::SUBWri: in getOutliningCandidateInfo()
8381 if (MI.getOperand(1).getReg() == AArch64::SP) in getOutliningCandidateInfo()
8452 if (!MI.modifiesRegister(AArch64::SP, &TRI) && in getOutliningCandidateInfo()
8453 !MI.readsRegister(AArch64::SP, &TRI)) in getOutliningCandidateInfo()
8459 if (MI.modifiesRegister(AArch64::SP, &TRI)) in getOutliningCandidateInfo()
8472 !Base->isReg() || Base->getReg() != AArch64::SP) in getOutliningCandidateInfo()
8516 else if (LastInstrOpcode == AArch64::BL || in getOutliningCandidateInfo()
8517 ((LastInstrOpcode == AArch64::BLR || in getOutliningCandidateInfo()
8518 LastInstrOpcode == AArch64::BLRNoIP) && in getOutliningCandidateInfo()
8537 ? C.isAvailableAcrossAndOutOfSeq(AArch64::LR, TRI) in getOutliningCandidateInfo()
8565 else if (C.isAvailableInsideSeq(AArch64::SP, TRI)) { in getOutliningCandidateInfo()
8620 // in the AArch64 MachineOutliner. This is because the code to do this in getOutliningCandidateInfo()
8639 (!C.isAvailableAcrossAndOutOfSeq(AArch64::LR, TRI) || in getOutliningCandidateInfo()
8759 // According to the AArch64 Procedure Call Standard, the following are in getOutlinableRanges()
8773 return LRU.available(AArch64::W16) && LRU.available(AArch64::W17) && in getOutlinableRanges()
8774 LRU.available(AArch64::NZCV); in getOutlinableRanges()
8844 LRAvailableEverywhere &= LRU.available(AArch64::LR); in getOutlinableRanges()
8875 case AArch64::PACM: in getOutliningTypeImpl()
8876 case AArch64::PACIASP: in getOutliningTypeImpl()
8877 case AArch64::PACIBSP: in getOutliningTypeImpl()
8878 case AArch64::PACIASPPC: in getOutliningTypeImpl()
8879 case AArch64::PACIBSPPC: in getOutliningTypeImpl()
8880 case AArch64::AUTIASP: in getOutliningTypeImpl()
8881 case AArch64::AUTIBSP: in getOutliningTypeImpl()
8882 case AArch64::AUTIASPPCi: in getOutliningTypeImpl()
8883 case AArch64::AUTIASPPCr: in getOutliningTypeImpl()
8884 case AArch64::AUTIBSPPCi: in getOutliningTypeImpl()
8885 case AArch64::AUTIBSPPCr: in getOutliningTypeImpl()
8886 case AArch64::RETAA: in getOutliningTypeImpl()
8887 case AArch64::RETAB: in getOutliningTypeImpl()
8888 case AArch64::RETAASPPCi: in getOutliningTypeImpl()
8889 case AArch64::RETAASPPCr: in getOutliningTypeImpl()
8890 case AArch64::RETABSPPCi: in getOutliningTypeImpl()
8891 case AArch64::RETABSPPCr: in getOutliningTypeImpl()
8892 case AArch64::EMITBKEY: in getOutliningTypeImpl()
8893 case AArch64::PAUTH_PROLOGUE: in getOutliningTypeImpl()
8894 case AArch64::PAUTH_EPILOGUE: in getOutliningTypeImpl()
8925 (MOP.getReg() == AArch64::LR || MOP.getReg() == AArch64::W30)) in getOutliningTypeImpl()
8932 if (MI.getOpcode() == AArch64::ADRP) in getOutliningTypeImpl()
8969 if (MI.getOpcode() == AArch64::BLR || in getOutliningTypeImpl()
8970 MI.getOpcode() == AArch64::BLRNoIP || MI.getOpcode() == AArch64::BL) in getOutliningTypeImpl()
8998 if (MI.readsRegister(AArch64::W30, &getRegisterInfo()) || in getOutliningTypeImpl()
8999 MI.modifiesRegister(AArch64::W30, &getRegisterInfo())) in getOutliningTypeImpl()
9021 (Base->isReg() && Base->getReg() != AArch64::SP)) in fixupPostOutline()
9048 BuildMI(MBB, MBB.begin(), DebugLoc(), TII->get(AArch64::PAUTH_PROLOGUE)) in signOutlinedFunction()
9051 TII->get(AArch64::PAUTH_EPILOGUE)) in signOutlinedFunction()
9068 if (Call->getOpcode() == AArch64::BL) { in buildOutlinedFrame()
9069 TailOpcode = AArch64::TCRETURNdi; in buildOutlinedFrame()
9071 assert(Call->getOpcode() == AArch64::BLR || in buildOutlinedFrame()
9072 Call->getOpcode() == AArch64::BLRNoIP); in buildOutlinedFrame()
9073 TailOpcode = AArch64::TCRETURNriALL; in buildOutlinedFrame()
9104 if (!MBB.isLiveIn(AArch64::LR)) in buildOutlinedFrame()
9105 MBB.addLiveIn(AArch64::LR); in buildOutlinedFrame()
9115 MachineInstr *STRXpre = BuildMI(MF, DebugLoc(), get(AArch64::STRXpre)) in buildOutlinedFrame()
9116 .addReg(AArch64::SP, RegState::Define) in buildOutlinedFrame()
9117 .addReg(AArch64::LR) in buildOutlinedFrame()
9118 .addReg(AArch64::SP) in buildOutlinedFrame()
9125 unsigned DwarfReg = MRI->getDwarfRegNum(AArch64::LR, true); in buildOutlinedFrame()
9130 BuildMI(MBB, It, DebugLoc(), get(AArch64::CFI_INSTRUCTION)) in buildOutlinedFrame()
9138 BuildMI(MBB, It, DebugLoc(), get(AArch64::CFI_INSTRUCTION)) in buildOutlinedFrame()
9144 MachineInstr *LDRXpost = BuildMI(MF, DebugLoc(), get(AArch64::LDRXpost)) in buildOutlinedFrame()
9145 .addReg(AArch64::SP, RegState::Define) in buildOutlinedFrame()
9146 .addReg(AArch64::LR, RegState::Define) in buildOutlinedFrame()
9147 .addReg(AArch64::SP) in buildOutlinedFrame()
9164 if (!MBB.isLiveIn(AArch64::LR)) in buildOutlinedFrame()
9165 MBB.addLiveIn(AArch64::LR); in buildOutlinedFrame()
9167 MachineInstr *ret = BuildMI(MF, DebugLoc(), get(AArch64::RET)) in buildOutlinedFrame()
9168 .addReg(AArch64::LR); in buildOutlinedFrame()
9191 It = MBB.insert(It, BuildMI(MF, DebugLoc(), get(AArch64::TCRETURNdi)) in insertOutlinedCall()
9201 It = MBB.insert(It, BuildMI(MF, DebugLoc(), get(AArch64::BL)) in insertOutlinedCall()
9221 if (!MBB.isLiveIn(AArch64::LR)) in insertOutlinedCall()
9222 MBB.addLiveIn(AArch64::LR); in insertOutlinedCall()
9225 Save = BuildMI(MF, DebugLoc(), get(AArch64::ORRXrs), Reg) in insertOutlinedCall()
9226 .addReg(AArch64::XZR) in insertOutlinedCall()
9227 .addReg(AArch64::LR) in insertOutlinedCall()
9229 Restore = BuildMI(MF, DebugLoc(), get(AArch64::ORRXrs), AArch64::LR) in insertOutlinedCall()
9230 .addReg(AArch64::XZR) in insertOutlinedCall()
9235 Save = BuildMI(MF, DebugLoc(), get(AArch64::STRXpre)) in insertOutlinedCall()
9236 .addReg(AArch64::SP, RegState::Define) in insertOutlinedCall()
9237 .addReg(AArch64::LR) in insertOutlinedCall()
9238 .addReg(AArch64::SP) in insertOutlinedCall()
9240 Restore = BuildMI(MF, DebugLoc(), get(AArch64::LDRXpost)) in insertOutlinedCall()
9241 .addReg(AArch64::SP, RegState::Define) in insertOutlinedCall()
9242 .addReg(AArch64::LR, RegState::Define) in insertOutlinedCall()
9243 .addReg(AArch64::SP) in insertOutlinedCall()
9251 It = MBB.insert(It, BuildMI(MF, DebugLoc(), get(AArch64::BL)) in insertOutlinedCall()
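Editor's note: insertOutlinedCall has to keep the return address correct across the call into the outlined body: a tail call needs nothing, a candidate where LR is dead can just use BL, and otherwise LR is either copied into a spare register (the ORRXrs pair above) or spilled and reloaded around the call (the STRXpre/LDRXpost pair). The toy policy table below loosely mirrors those frame kinds; the enum and strings are illustrative only.

#include <cassert>
#include <string>

enum class FrameKind { TailCall, NoLRSave, RegSave, Default };

// Rough summary of the code each strategy ends up emitting around the call.
static std::string lrStrategy(FrameKind K) {
  switch (K) {
  case FrameKind::TailCall:
    return "b OUTLINED (no save needed)";
  case FrameKind::NoLRSave:
    return "bl OUTLINED (LR dead across the candidate)";
  case FrameKind::RegSave:
    return "mov xN, lr ; bl OUTLINED ; mov lr, xN";
  case FrameKind::Default:
    return "str lr, [sp, #-16]! ; bl OUTLINED ; ldr lr, [sp], #16";
  }
  return "";
}

int main() { assert(lrStrategy(FrameKind::RegSave).find("mov xN, lr") == 0); }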
9274 BuildMI(MBB, Iter, DL, get(AArch64::MOVZXi), Reg).addImm(0).addImm(0); in buildClearRegister()
9276 BuildMI(MBB, Iter, DL, get(AArch64::DUP_ZI_D), Reg) in buildClearRegister()
9280 BuildMI(MBB, Iter, DL, get(AArch64::MOVIv2d_ns), Reg) in buildClearRegister()
9288 // AArch64::ORRWrs and AArch64::ORRXrs with WZR/XZR reg in isCopyInstrImpl()
9290 if (MI.getOpcode() == AArch64::ORRWrs && in isCopyInstrImpl()
9291 MI.getOperand(1).getReg() == AArch64::WZR && in isCopyInstrImpl()
9297 MI.findRegisterDefOperandIdx(MI.getOperand(0).getReg() - AArch64::W0 + in isCopyInstrImpl()
9298 AArch64::X0, in isCopyInstrImpl()
9302 if (MI.getOpcode() == AArch64::ORRXrs && in isCopyInstrImpl()
9303 MI.getOperand(1).getReg() == AArch64::XZR && in isCopyInstrImpl()
9312 if (MI.getOpcode() == AArch64::ORRWrs && in isCopyLikeInstrImpl()
9313 MI.getOperand(1).getReg() == AArch64::WZR && in isCopyLikeInstrImpl()
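Editor's note: isCopyInstrImpl / isCopyLikeInstrImpl report ORRWrs/ORRXrs with the zero register as first source (and no shift) as plain register copies, so later analyses can follow the value through them. A toy recogniser over a textual instruction is sketched below; the Inst fields are made up for the example.

#include <cassert>
#include <optional>
#include <string>
#include <utility>

struct Inst {
  std::string opc, rd, rn, rm;
  unsigned shift;
};

// "orr xd, xzr, xm" with zero shift is just a register move: report rd <- rm.
static std::optional<std::pair<std::string, std::string>> asCopy(const Inst &I) {
  if (I.opc == "orr" && (I.rn == "xzr" || I.rn == "wzr") && I.shift == 0)
    return std::make_pair(I.rd, I.rm);
  return std::nullopt;
}

int main() {
  Inst MovLike{"orr", "x0", "xzr", "x1", 0};
  assert(asCopy(MovLike) && asCopy(MovLike)->second == "x1");
}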
9333 case AArch64::SUBWri: in isAddImmediate()
9334 case AArch64::SUBXri: in isAddImmediate()
9335 case AArch64::SUBSWri: in isAddImmediate()
9336 case AArch64::SUBSXri: in isAddImmediate()
9339 case AArch64::ADDSWri: in isAddImmediate()
9340 case AArch64::ADDSXri: in isAddImmediate()
9341 case AArch64::ADDWri: in isAddImmediate()
9342 case AArch64::ADDXri: { in isAddImmediate()
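Editor's note: isAddImmediate describes ADD/SUB-immediate as "destination = source plus a signed offset". On AArch64 the 12-bit immediate may carry an optional LSL #12, so the offset works out to sign * (imm12 << shift); the helper below is a hypothetical illustration of that arithmetic, not the TargetInstrInfo hook itself.

#include <cassert>
#include <cstdint>

// Offset contributed by an ADD/SUB-immediate: the 12-bit immediate, optionally
// shifted left by 12, negated for the SUB forms.
static int64_t addImmOffset(bool IsSub, uint64_t Imm12, bool Lsl12) {
  int64_t Off = static_cast<int64_t>(Imm12 << (Lsl12 ? 12 : 0));
  return IsSub ? -Off : Off;
}

int main() {
  assert(addImmOffset(false, 0x10, false) == 16);  // add x0, x1, #16
  assert(addImmOffset(true, 1, true) == -4096);    // sub sp, sp, #1, lsl #12
}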
9376 if (MI.getOpcode() == AArch64::ORRWrs && in describeORRLoadedValue()
9381 if (MI.getOpcode() == AArch64::ORRXrs && in describeORRLoadedValue()
9383 Register SrcSubReg = TRI->getSubReg(SrcReg, AArch64::sub_32); in describeORRLoadedValue()
9394 // Functions cannot be split to different sections on AArch64 if they have in isFunctionSafeToSplit()
9409 return MI.getOpcode() == AArch64::INLINEASM_BR; in isMBBSafeToSplitToCold()
9429 case AArch64::JumpTableDest32: in isMBBSafeToSplitToCold()
9430 case AArch64::JumpTableDest16: in isMBBSafeToSplitToCold()
9431 case AArch64::JumpTableDest8: in isMBBSafeToSplitToCold()
9448 case AArch64::MOVZWi: in describeLoadedValue()
9449 case AArch64::MOVZXi: { in describeLoadedValue()
9462 case AArch64::ORRWrs: in describeLoadedValue()
9463 case AArch64::ORRXrs: in describeLoadedValue()
9491 return get(Opc).TSFlags & AArch64::ElementSizeMask; in getElementSizeForOpcode()
9495 return get(Opc).TSFlags & AArch64::InstrFlagIsPTestLike; in isPTestLikeOpcode()
9499 return get(Opc).TSFlags & AArch64::InstrFlagIsWhile; in isWhileOpcode()
9533 return AArch64::BLRNoIP; in getBLRCallOpcode()
9535 return AArch64::BLR; in getBLRCallOpcode()
9541   assert(TargetReg != AArch64::SP && "New top of stack cannot already be in SP"); in probedStackAlloc()
9564 emitFrameOffset(*LoopTestMBB, LoopTestMBB->end(), DL, AArch64::SP, in probedStackAlloc()
9565 AArch64::SP, StackOffset::getFixed(-ProbeSize), TII, Flags); in probedStackAlloc()
9568 BuildMI(*LoopTestMBB, LoopTestMBB->end(), DL, TII->get(AArch64::SUBSXrx64), in probedStackAlloc()
9569 AArch64::XZR) in probedStackAlloc()
9570 .addReg(AArch64::SP) in probedStackAlloc()
9576 BuildMI(*LoopTestMBB, LoopTestMBB->end(), DL, TII->get(AArch64::Bcc)) in probedStackAlloc()
9582 BuildMI(*LoopBodyMBB, LoopBodyMBB->end(), DL, TII->get(AArch64::STRXui)) in probedStackAlloc()
9583 .addReg(AArch64::XZR) in probedStackAlloc()
9584 .addReg(AArch64::SP) in probedStackAlloc()
9589 BuildMI(*LoopBodyMBB, LoopBodyMBB->end(), DL, TII->get(AArch64::B)) in probedStackAlloc()
9595 BuildMI(*ExitMBB, ExitMBB->end(), DL, TII->get(AArch64::ADDXri), AArch64::SP) in probedStackAlloc()
9602 BuildMI(*ExitMBB, ExitMBB->end(), DL, TII->get(AArch64::LDRXui)) in probedStackAlloc()
9603 .addReg(AArch64::XZR, RegState::Define) in probedStackAlloc()
9604 .addReg(AArch64::SP) in probedStackAlloc()
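Editor's note: probedStackAlloc emits a loop that walks the stack pointer down towards TargetReg one probe-sized chunk at a time, storing to each chunk, then sets SP to the target and, as the trailing LDRXui above suggests, touches the final chunk as well. The simulation below only counts the addresses such a loop would touch; the 4 KiB probe size and the concrete bounds are example values, not taken from the pass.

#include <cassert>
#include <cstdint>
#include <vector>

int main() {
  const uint64_t ProbeSize = 4096;
  uint64_t SP = 0x100000, Target = 0x100000 - 3 * ProbeSize - 128;
  std::vector<uint64_t> Probed;
  while (SP - Target >= ProbeSize) { // loop test (SUBS + cond. branch above)
    SP -= ProbeSize;                 // sub sp, sp, #ProbeSize
    Probed.push_back(SP);            // str xzr, [sp]
  }
  SP = Target;                       // final adjustment of sp to the target
  Probed.push_back(SP);              // trailing probe (cf. the LDRXui above)
  assert(Probed.size() == 4 && SP == Target);
}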
9737 assert(CondBranch->getOpcode() == AArch64::Bcc); in createRemainingIterationsGreaterCondition()
9745 Register AccCond = AArch64::XZR; in createRemainingIterationsGreaterCondition()
9750 Register NewCond = MRI.createVirtualRegister(&AArch64::GPR64commonRegClass); in createRemainingIterationsGreaterCondition()
9751 BuildMI(MBB, MBB.end(), Comp->getDebugLoc(), TII->get(AArch64::CSINCXr)) in createRemainingIterationsGreaterCondition()
9816 BuildMI(MBB, MBB.end(), Comp->getDebugLoc(), TII->get(AArch64::SUBSXri)) in createRemainingIterationsGreaterCondition()
9817 .addReg(AArch64::XZR, RegState::Define | RegState::Dead) in createRemainingIterationsGreaterCondition()
9890 case AArch64::ADDSXri: in getIndVarInfo()
9891 case AArch64::ADDSWri: in getIndVarInfo()
9892 case AArch64::SUBSXri: in getIndVarInfo()
9893 case AArch64::SUBSWri: in getIndVarInfo()
9894 case AArch64::ADDXri: in getIndVarInfo()
9895 case AArch64::ADDWri: in getIndVarInfo()
9896 case AArch64::SUBXri: in getIndVarInfo()
9897 case AArch64::SUBWri: in getIndVarInfo()
9901 case AArch64::ADDSXrr: in getIndVarInfo()
9902 case AArch64::ADDSWrr: in getIndVarInfo()
9903 case AArch64::SUBSXrr: in getIndVarInfo()
9904 case AArch64::SUBSWrr: in getIndVarInfo()
9905 case AArch64::ADDXrr: in getIndVarInfo()
9906 case AArch64::ADDWrr: in getIndVarInfo()
9907 case AArch64::SUBXrr: in getIndVarInfo()
9908 case AArch64::SUBWrr: in getIndVarInfo()
9970 if (CondBranch->getOpcode() != AArch64::Bcc) in analyzeLoopForPipelining()
9980 if (MI.modifiesRegister(AArch64::NZCV, &TRI)) { in analyzeLoopForPipelining()
9985 case AArch64::SUBSXri: in analyzeLoopForPipelining()
9986 case AArch64::SUBSWri: in analyzeLoopForPipelining()
9987 case AArch64::ADDSXri: in analyzeLoopForPipelining()
9988 case AArch64::ADDSWri: in analyzeLoopForPipelining()
9992 case AArch64::ADDSWrr: in analyzeLoopForPipelining()
9993 case AArch64::ADDSXrr: in analyzeLoopForPipelining()
9994 case AArch64::SUBSWrr: in analyzeLoopForPipelining()
9995 case AArch64::SUBSXrr: in analyzeLoopForPipelining()