Lines Matching refs:RISCV

68    return MI.getOpcode() == RISCV::PseudoVSETVLI ||  in isVectorConfigInstr()
69           MI.getOpcode() == RISCV::PseudoVSETVLIX0 ||  in isVectorConfigInstr()
70           MI.getOpcode() == RISCV::PseudoVSETIVLI;  in isVectorConfigInstr()
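
These hits appear to come from LLVM's RISC-V vector-configuration insertion pass (the function names match RISCVInsertVSETVLI.cpp). Lines 68-70 cover the whole body of the first predicate, so it can be reconstructed directly; a minimal sketch, assuming the pass's usual headers and the generated RISCV:: opcode enums from the backend's instruction tables:

    #include "llvm/CodeGen/MachineInstr.h"
    // RISCV::PseudoVSETVLI etc. come from the generated RISCVGenInstrInfo.inc
    // pulled in through the backend's RISCVInstrInfo.h.

    using namespace llvm;

    // True for the three pseudos that write a new VL/VTYPE configuration.
    static bool isVectorConfigInstr(const MachineInstr &MI) {
      return MI.getOpcode() == RISCV::PseudoVSETVLI ||
             MI.getOpcode() == RISCV::PseudoVSETVLIX0 ||
             MI.getOpcode() == RISCV::PseudoVSETIVLI;
    }
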
76 if (MI.getOpcode() != RISCV::PseudoVSETVLIX0) in isVLPreservingConfig()
78 assert(RISCV::X0 == MI.getOperand(1).getReg()); in isVLPreservingConfig()
79 return RISCV::X0 == MI.getOperand(0).getReg(); in isVLPreservingConfig()
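
Lines 76-79 give everything except the signature and the early return; a sketch of the predicate with those two pieces filled in as assumptions:

    #include <cassert>

    // The `vsetvli x0, x0, <vtype>` form changes VTYPE but preserves VL.
    // PseudoVSETVLIX0 hard-codes x0 as the AVL operand (operand 1), so only
    // the destination register decides whether VL is preserved.
    static bool isVLPreservingConfig(const MachineInstr &MI) {
      if (MI.getOpcode() != RISCV::PseudoVSETVLIX0)
        return false; // filled in: not visible in the hits
      assert(RISCV::X0 == MI.getOperand(1).getReg());
      return RISCV::X0 == MI.getOperand(0).getReg();
    }
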
83 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in isFloatScalarMoveOrScalarSplatInstr()
86 case RISCV::VFMV_S_F: in isFloatScalarMoveOrScalarSplatInstr()
87 case RISCV::VFMV_V_F: in isFloatScalarMoveOrScalarSplatInstr()
93 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in isScalarExtractInstr()
96 case RISCV::VMV_X_S: in isScalarExtractInstr()
97 case RISCV::VFMV_F_S: in isScalarExtractInstr()
103 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in isScalarInsertInstr()
106 case RISCV::VMV_S_X: in isScalarInsertInstr()
107 case RISCV::VFMV_S_F: in isScalarInsertInstr()
113 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in isScalarSplatInstr()
116 case RISCV::VMV_V_I: in isScalarSplatInstr()
117 case RISCV::VMV_V_X: in isScalarSplatInstr()
118 case RISCV::VFMV_V_F: in isScalarSplatInstr()
124 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in isVSlideInstr()
127 case RISCV::VSLIDEDOWN_VX: in isVSlideInstr()
128 case RISCV::VSLIDEDOWN_VI: in isVSlideInstr()
129 case RISCV::VSLIDEUP_VX: in isVSlideInstr()
130 case RISCV::VSLIDEUP_VI: in isVSlideInstr()
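
The classifier helpers on lines 83-130 all follow one pattern: map the vector pseudo back to its MC opcode with RISCV::getRVVMCOpcode() and switch on that, so every SEW/LMUL/masked variant of an instruction hits a single case. A sketch of isVSlideInstr, with the default and return-true lines (which did not match "RISCV") filled in as assumptions:

    static bool isVSlideInstr(const MachineInstr &MI) {
      switch (RISCV::getRVVMCOpcode(MI.getOpcode())) {
      default:
        return false;
      case RISCV::VSLIDEDOWN_VX:
      case RISCV::VSLIDEDOWN_VI:
      case RISCV::VSLIDEUP_VX:
      case RISCV::VSLIDEUP_VI:
        return true;
      }
    }
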
138 switch (RISCV::getRVVMCOpcode(MI.getOpcode())) { in getEEWForLoadStore()
141 case RISCV::VLE8_V: in getEEWForLoadStore()
142 case RISCV::VLSE8_V: in getEEWForLoadStore()
143 case RISCV::VSE8_V: in getEEWForLoadStore()
144 case RISCV::VSSE8_V: in getEEWForLoadStore()
146 case RISCV::VLE16_V: in getEEWForLoadStore()
147 case RISCV::VLSE16_V: in getEEWForLoadStore()
148 case RISCV::VSE16_V: in getEEWForLoadStore()
149 case RISCV::VSSE16_V: in getEEWForLoadStore()
151 case RISCV::VLE32_V: in getEEWForLoadStore()
152 case RISCV::VLSE32_V: in getEEWForLoadStore()
153 case RISCV::VSE32_V: in getEEWForLoadStore()
154 case RISCV::VSSE32_V: in getEEWForLoadStore()
156 case RISCV::VLE64_V: in getEEWForLoadStore()
157 case RISCV::VLSE64_V: in getEEWForLoadStore()
158 case RISCV::VSE64_V: in getEEWForLoadStore()
159 case RISCV::VSSE64_V: in getEEWForLoadStore()
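
Lines 138-159 group the unit-stride and strided load/store opcodes by element width. The return statements between the case groups did not match "RISCV" and are restored here; the std::optional return type is an assumption based on the "no match" default:

    #include <optional>

    static std::optional<unsigned> getEEWForLoadStore(const MachineInstr &MI) {
      switch (RISCV::getRVVMCOpcode(MI.getOpcode())) {
      default:
        return std::nullopt;
      case RISCV::VLE8_V:
      case RISCV::VLSE8_V:
      case RISCV::VSE8_V:
      case RISCV::VSSE8_V:
        return 8;
      case RISCV::VLE16_V:
      case RISCV::VLSE16_V:
      case RISCV::VSE16_V:
      case RISCV::VSSE16_V:
        return 16;
      case RISCV::VLE32_V:
      case RISCV::VLSE32_V:
      case RISCV::VSE32_V:
      case RISCV::VSSE32_V:
        return 32;
      case RISCV::VLE64_V:
      case RISCV::VLSE64_V:
      case RISCV::VSE64_V:
      case RISCV::VSSE64_V:
        return 64;
      }
    }
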
165    return MI.getOpcode() == RISCV::ADDI &&  in isNonZeroLoadImmediate()
167           MI.getOperand(1).getReg() == RISCV::X0 &&  in isNonZeroLoadImmediate()
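
Lines 165 and 167 are two pieces of one return expression: the predicate recognizes `addi rd, x0, imm` with a non-zero immediate, i.e. a `li` of a known non-zero value. A sketch with the unmatched operand checks filled in as assumptions:

    static bool isNonZeroLoadImmediate(const MachineInstr &MI) {
      return MI.getOpcode() == RISCV::ADDI &&
             MI.getOperand(1).isReg() && MI.getOperand(2).isImm() &&
             MI.getOperand(1).getReg() == RISCV::X0 &&
             MI.getOperand(2).getImm() != 0;
    }
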
196 return UseMO.getReg() == RISCV::NoRegister || UseMO.isUndef(); in hasUndefinedMergeOp()
396 MI.readsRegister(RISCV::VL, /*TRI=*/nullptr)) in getDemanded()
399 MI.readsRegister(RISCV::VTYPE, /*TRI=*/nullptr)) in getDemanded()
961 if (MI.getOpcode() == RISCV::PseudoVSETIVLI) { in getInfoForVSETVLI()
964    assert(MI.getOpcode() == RISCV::PseudoVSETVLI ||  in getInfoForVSETVLI()
965           MI.getOpcode() == RISCV::PseudoVSETVLIX0);  in getInfoForVSETVLI()
967 assert((AVLReg != RISCV::X0 || MI.getOperand(0).getReg() != RISCV::X0) && in getInfoForVSETVLI()
969 if (AVLReg == RISCV::X0) in getInfoForVSETVLI()
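
Lines 961-969 show the AVL handling when a vsetvli pseudo is folded into the pass's abstract state: an immediate AVL for PseudoVSETIVLI, and for the register forms an x0 AVL together with a non-x0 destination means "use VLMAX". The VSETVLIInfo setters and the assert message below are assumptions about the pass's local lattice class; only the branching structure comes from the hits:

    static VSETVLIInfo getInfoForVSETVLI(const MachineInstr &MI) {
      VSETVLIInfo NewInfo;
      if (MI.getOpcode() == RISCV::PseudoVSETIVLI) {
        NewInfo.setAVLImm(MI.getOperand(1).getImm());  // assumed setter
      } else {
        assert(MI.getOpcode() == RISCV::PseudoVSETVLI ||
               MI.getOpcode() == RISCV::PseudoVSETVLIX0);
        Register AVLReg = MI.getOperand(1).getReg();
        assert((AVLReg != RISCV::X0 || MI.getOperand(0).getReg() != RISCV::X0) &&
               "x0,x0 form should have been treated as VL-preserving");
        if (AVLReg == RISCV::X0)
          NewInfo.setAVLVLMAX();                       // assumed setter
        else
          NewInfo.setAVLReg(AVLReg);                   // assumed setter
      }
      NewInfo.setVTYPE(MI.getOperand(2).getImm());     // assumed setter
      return NewInfo;
    }
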
1039 if (Imm == RISCV::VLMaxSentinel) { in computeInfoForInstr()
1084    auto MI = BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETVLIX0))  in insertVSETVLI()
1085                  .addReg(RISCV::X0, RegState::Define | RegState::Dead)  in insertVSETVLI()
1086                  .addReg(RISCV::X0, RegState::Kill)  in insertVSETVLI()
1088                  .addReg(RISCV::VL, RegState::Implicit);  in insertVSETVLI()
1102    auto MI = BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETVLIX0))  in insertVSETVLI()
1103                  .addReg(RISCV::X0, RegState::Define | RegState::Dead)  in insertVSETVLI()
1104                  .addReg(RISCV::X0, RegState::Kill)  in insertVSETVLI()
1106                  .addReg(RISCV::VL, RegState::Implicit);  in insertVSETVLI()
1116    auto MI = BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETIVLI))  in insertVSETVLI()
1117                  .addReg(RISCV::X0, RegState::Define | RegState::Dead)  in insertVSETVLI()
1126 Register DestReg = MRI->createVirtualRegister(&RISCV::GPRRegClass); in insertVSETVLI()
1127    auto MI = BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETVLIX0))  in insertVSETVLI()
1129                  .addReg(RISCV::X0, RegState::Kill)  in insertVSETVLI()
1139 MRI->constrainRegClass(AVLReg, &RISCV::GPRNoX0RegClass); in insertVSETVLI()
1140    auto MI = BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETVLI))  in insertVSETVLI()
1141                  .addReg(RISCV::X0, RegState::Define | RegState::Dead)  in insertVSETVLI()
1155 MRI->createVirtualRegister(&RISCV::GPRNoX0RegClass); in insertVSETVLI()
1165 BuildMI(*II->getParent(), II, DL, TII->get(RISCV::COPY), AVLCopyReg) in insertVSETVLI()
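
The BuildMI chains on lines 1084-1165 are the actual vsetvli emission. The clearest form in the hits is the VTYPE-only update: define-and-kill x0 so neither a new VL nor an AVL register stays live, pass the encoded VTYPE as an immediate, and add VL as an implicit use so the dependence on the preserved VL remains visible to later passes. A hypothetical helper wrapping that pattern (the parameter list is an assumption; the operands mirror lines 1084-1088):

    #include "llvm/CodeGen/MachineInstrBuilder.h"
    #include "llvm/CodeGen/TargetInstrInfo.h"

    static void emitVTypeOnlyVSETVLI(MachineBasicBlock &MBB,
                                     MachineBasicBlock::iterator InsertPt,
                                     const DebugLoc &DL,
                                     const TargetInstrInfo *TII,
                                     unsigned EncodedVType) {
      BuildMI(MBB, InsertPt, DL, TII->get(RISCV::PseudoVSETVLIX0))
          .addReg(RISCV::X0, RegState::Define | RegState::Dead) // discard new VL
          .addReg(RISCV::X0, RegState::Kill)                    // AVL operand is x0
          .addImm(EncodedVType)
          .addReg(RISCV::VL, RegState::Implicit);               // keep old VL live
    }
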
1270 if (RISCV::isFaultFirstLoad(MI)) { in transferAfter()
1287 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in transferAfter()
1288 MI.modifiesRegister(RISCV::VTYPE, /*TRI=*/nullptr)) in transferAfter()
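
Lines 1270-1288 show how the dataflow state is updated after an instruction: a vsetvli replaces the state, a fault-only-first load redefines VL, and anything else that might touch VL or VTYPE (calls, inline asm, explicit clobbers) drops the state to "unknown". A hedged outline only; VSETVLIInfo::getUnknown() and the fault-first update are assumptions about the surrounding pass:

    void transferAfter(VSETVLIInfo &Info, const MachineInstr &MI) {
      if (isVectorConfigInstr(MI)) {
        Info = getInfoForVSETVLI(MI);
        return;
      }
      if (RISCV::isFaultFirstLoad(MI)) {
        // vleNffN.v redefines VL; the new AVL is the load's VL output.
        Info.setAVLReg(MI.getOperand(1).getReg());   // assumed setter
        return;
      }
      if (MI.isCall() || MI.isInlineAsm() ||
          MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) ||
          MI.modifiesRegister(RISCV::VTYPE, /*TRI=*/nullptr))
        Info = VSETVLIInfo::getUnknown();            // assumed helper
    }
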
1422    assert(MI.getOperand(3).getReg() == RISCV::VL &&  in emitVSETVLIs()
1423           MI.getOperand(4).getReg() == RISCV::VTYPE &&  in emitVSETVLIs()
1451 VLOp.setReg(RISCV::NoRegister); in emitVSETVLIs()
1474 MI.addOperand(MachineOperand::CreateReg(RISCV::VL, /*isDef*/ false, in emitVSETVLIs()
1477 MI.addOperand(MachineOperand::CreateReg(RISCV::VTYPE, /*isDef*/ false, in emitVSETVLIs()
1482 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in emitVSETVLIs()
1483 MI.modifiesRegister(RISCV::VTYPE, /*TRI=*/nullptr)) in emitVSETVLIs()
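
Lines 1474-1477 show how emitVSETVLIs records a vector instruction's dependence on the chosen configuration once its explicit VL operand has been dropped (line 1451): it appends implicit, non-defining uses of VL and VTYPE. A minimal sketch of just that step, assuming the same headers as the earlier sketches:

    #include "llvm/CodeGen/MachineOperand.h"

    static void addImplicitVLVTypeUses(MachineInstr &MI) {
      MI.addOperand(MachineOperand::CreateReg(RISCV::VL, /*isDef*/ false,
                                              /*isImp*/ true));
      MI.addOperand(MachineOperand::CreateReg(RISCV::VTYPE, /*isDef*/ false,
                                              /*isImp*/ true));
    }
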
1627 if (AVL.isReg() && AVL.getReg() != RISCV::X0 && in canMutatePriorConfig()
1670 MI.modifiesRegister(RISCV::VL, /*TRI=*/nullptr) || in coalesceVSETVLIs()
1671 MI.modifiesRegister(RISCV::VTYPE, /*TRI=*/nullptr)) in coalesceVSETVLIs()
1747 if (RISCV::isFaultFirstLoad(MI)) { in insertReadVL()
1752 TII->get(RISCV::PseudoReadVL), VLOutput); in insertReadVL()
1764 MI.getOperand(1).setReg(RISCV::X0); in insertReadVL()
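
Lines 1747-1764 handle fault-only-first loads after the main rewrite: the pass materializes the trimmed VL with a PseudoReadVL into the load's old VL-output register, then retargets that output to x0 so the load no longer defines a GPR directly. A sketch of the per-instruction step; the isDead check and the loop that would drive it are assumptions:

    static void lowerFaultFirstVL(MachineBasicBlock &MBB,
                                  MachineBasicBlock::iterator InsertPt, // just past MI
                                  MachineInstr &MI,
                                  const TargetInstrInfo *TII) {
      if (!RISCV::isFaultFirstLoad(MI))
        return;
      Register VLOutput = MI.getOperand(1).getReg();
      if (!MI.getOperand(1).isDead())
        BuildMI(MBB, InsertPt, MI.getDebugLoc(), TII->get(RISCV::PseudoReadVL),
                VLOutput);
      // The vl result of the vleNffN.v pseudo is not read directly anymore.
      MI.getOperand(1).setReg(RISCV::X0);
    }
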