//===-- MVETailPredUtils.h - Tail predication utility functions -*- C++-*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains utility functions for low-overhead and tail-predicated
// loops, shared between the ARMLowOverheadLoops pass and anywhere else that
// needs them.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H
#define LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H

#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

namespace llvm {

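// Map a VCTP opcode to the equivalent tail-predicated loop start opcode with
// the same element size: DLSTP for a do-loop, WLSTP for a while-loop.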
static inline unsigned VCTPOpcodeToLSTP(unsigned Opcode, bool IsDoLoop) {
  switch (Opcode) {
  default:
    llvm_unreachable("unhandled vctp opcode");
    break;
  case ARM::MVE_VCTP8:
    return IsDoLoop ? ARM::MVE_DLSTP_8 : ARM::MVE_WLSTP_8;
  case ARM::MVE_VCTP16:
    return IsDoLoop ? ARM::MVE_DLSTP_16 : ARM::MVE_WLSTP_16;
  case ARM::MVE_VCTP32:
    return IsDoLoop ? ARM::MVE_DLSTP_32 : ARM::MVE_WLSTP_32;
  case ARM::MVE_VCTP64:
    return IsDoLoop ? ARM::MVE_DLSTP_64 : ARM::MVE_WLSTP_64;
  }
  return 0;
}

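// Return the number of vector lanes processed per iteration for the given
// VCTP opcode, e.g. 16 lanes for MVE_VCTP8 and 2 lanes for MVE_VCTP64.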
static inline unsigned getTailPredVectorWidth(unsigned Opcode) {
  switch (Opcode) {
  default:
    llvm_unreachable("unhandled vctp opcode");
  case ARM::MVE_VCTP8:
    return 16;
  case ARM::MVE_VCTP16:
    return 8;
  case ARM::MVE_VCTP32:
    return 4;
  case ARM::MVE_VCTP64:
    return 2;
  }
  return 0;
}

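// Return true if MI is one of the MVE VCTP (Create Vector Tail Predicate)
// instructions.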
static inline bool isVCTP(const MachineInstr *MI) {
  switch (MI->getOpcode()) {
  default:
    break;
  case ARM::MVE_VCTP8:
  case ARM::MVE_VCTP16:
  case ARM::MVE_VCTP32:
  case ARM::MVE_VCTP64:
    return true;
  }
  return false;
}

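// Return true if MI is a do-loop start pseudo (t2DoLoopStart or its
// tail-predicated form t2DoLoopStartTP).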
static inline bool isDoLoopStart(const MachineInstr &MI) {
  return MI.getOpcode() == ARM::t2DoLoopStart ||
         MI.getOpcode() == ARM::t2DoLoopStartTP;
}

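// Return true if MI is one of the while-loop start pseudos
// (t2WhileLoopStart, t2WhileLoopStartLR or t2WhileLoopStartTP).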
static inline bool isWhileLoopStart(const MachineInstr &MI) {
  return MI.getOpcode() == ARM::t2WhileLoopStart ||
         MI.getOpcode() == ARM::t2WhileLoopStartLR ||
         MI.getOpcode() == ARM::t2WhileLoopStartTP;
}

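// Return true if MI is any low-overhead loop start pseudo, whether a do-loop
// or a while-loop variant.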
static inline bool isLoopStart(const MachineInstr &MI) {
  return isDoLoopStart(MI) || isWhileLoopStart(MI);
}

// Return the TargetBB stored in a t2WhileLoopStartLR/t2WhileLoopStartTP.
inline MachineBasicBlock *getWhileLoopStartTargetBB(const MachineInstr &MI) {
  assert(isWhileLoopStart(MI) && "Expected WhileLoopStart!");
  unsigned Op = MI.getOpcode() == ARM::t2WhileLoopStartTP ? 3 : 2;
  return MI.getOperand(Op).getMBB();
}

// WhileLoopStart holds the exit block, so produce a subs Op0, Op1, 0 and then
// a beq that branches to the exit block.
// If UseCmp is true, this will create a t2CMP instead of a t2SUBri, meaning
// the value of LR going into the loop will not be set up. This is used if the
// LR setup is done via another means (via a t2DoLoopStart, for example).
inline void RevertWhileLoopStartLR(MachineInstr *MI, const TargetInstrInfo *TII,
                                   unsigned BrOpc = ARM::t2Bcc,
                                   bool UseCmp = false) {
  MachineBasicBlock *MBB = MI->getParent();
  assert((MI->getOpcode() == ARM::t2WhileLoopStartLR ||
          MI->getOpcode() == ARM::t2WhileLoopStartTP) &&
         "Only expected a t2WhileLoopStartLR/TP in RevertWhileLoopStartLR!");

  // Subs/Cmp
  if (UseCmp) {
    MachineInstrBuilder MIB =
        BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2CMPri));
    MIB.add(MI->getOperand(1));
    MIB.addImm(0);
    MIB.addImm(ARMCC::AL);
    MIB.addReg(ARM::NoRegister);
  } else {
    MachineInstrBuilder MIB =
        BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2SUBri));
    MIB.add(MI->getOperand(0));
    MIB.add(MI->getOperand(1));
    MIB.addImm(0);
    MIB.addImm(ARMCC::AL);
    MIB.addReg(ARM::NoRegister);
    MIB.addReg(ARM::CPSR, RegState::Define);
  }

  // Branch
  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(BrOpc));
  MIB.addMBB(getWhileLoopStartTargetBB(*MI)); // branch target
  MIB.addImm(ARMCC::EQ);                      // condition code
  MIB.addReg(ARM::CPSR);

  MI->eraseFromParent();
}

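// Revert a DoLoopStart pseudo to a plain register move (tMOVr) that copies the
// trip count into the destination register (normally LR), then erase the
// pseudo.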
inline void RevertDoLoopStart(MachineInstr *MI, const TargetInstrInfo *TII) {
  MachineBasicBlock *MBB = MI->getParent();
  BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::tMOVr))
      .add(MI->getOperand(0))
      .add(MI->getOperand(1))
      .add(predOps(ARMCC::AL));

  MI->eraseFromParent();
}

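// Revert a loop-decrement pseudo to a t2SUBri. If SetFlags is true the
// subtract also defines CPSR, so a following conditional branch can test the
// result against zero.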
inline void RevertLoopDec(MachineInstr *MI, const TargetInstrInfo *TII,
                          bool SetFlags = false) {
  MachineBasicBlock *MBB = MI->getParent();

  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2SUBri));
  MIB.add(MI->getOperand(0));
  MIB.add(MI->getOperand(1));
  MIB.add(MI->getOperand(2));
  MIB.addImm(ARMCC::AL);
  MIB.addReg(0);

  if (SetFlags) {
    MIB.addReg(ARM::CPSR);
    MIB->getOperand(5).setIsDef(true);
  } else
    MIB.addReg(0);

  MI->eraseFromParent();
}

// Generate a subs, or sub and cmp, and a branch instead of an LE.
inline void RevertLoopEnd(MachineInstr *MI, const TargetInstrInfo *TII,
                          unsigned BrOpc = ARM::t2Bcc, bool SkipCmp = false) {
  MachineBasicBlock *MBB = MI->getParent();

  // Create cmp
  if (!SkipCmp) {
    MachineInstrBuilder MIB =
        BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(ARM::t2CMPri));
    MIB.add(MI->getOperand(0));
    MIB.addImm(0);
    MIB.addImm(ARMCC::AL);
    MIB.addReg(ARM::NoRegister);
  }

  // Create bne
  MachineInstrBuilder MIB =
      BuildMI(*MBB, MI, MI->getDebugLoc(), TII->get(BrOpc));
  MIB.add(MI->getOperand(1)); // branch target
  MIB.addImm(ARMCC::NE);      // condition code
  MIB.addReg(ARM::CPSR);
  MI->eraseFromParent();
}

} // end namespace llvm

#endif // LLVM_LIB_TARGET_ARM_MVETAILPREDUTILS_H