//===- LoadStoreOpt.cpp ----------- Generic memory optimizations -*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the LoadStoreOpt optimization pass.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GlobalISel/LoadStoreOpt.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/GlobalISel/GenericMachineInstrs.h"
#include "llvm/CodeGen/GlobalISel/LegalizerInfo.h"
#include "llvm/CodeGen/GlobalISel/MIPatternMatch.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/LowLevelTypeUtils.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOptimizationRemarkEmitter.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/Register.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include <algorithm>

#define DEBUG_TYPE "loadstore-opt"

using namespace llvm;
using namespace ore;
using namespace MIPatternMatch;

STATISTIC(NumStoresMerged, "Number of stores merged");

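// The maximum store width, in bits, that this pass will try to create when
// merging narrower stores together.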
const unsigned MaxStoreSizeToForm = 128;

char LoadStoreOpt::ID = 0;
INITIALIZE_PASS_BEGIN(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations",
                      false, false)
INITIALIZE_PASS_END(LoadStoreOpt, DEBUG_TYPE, "Generic memory optimizations",
                    false, false)

LoadStoreOpt::LoadStoreOpt(std::function<bool(const MachineFunction &)> F)
    : MachineFunctionPass(ID), DoNotRunPass(F) {}

LoadStoreOpt::LoadStoreOpt()
    : LoadStoreOpt([](const MachineFunction &) { return false; }) {}

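/// Cache the analyses, register info and target hooks used while processing
/// \p MF, and record whether the pass is running before legalization.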
void LoadStoreOpt::init(MachineFunction &MF) {
  this->MF = &MF;
  MRI = &MF.getRegInfo();
  AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  TLI = MF.getSubtarget().getTargetLowering();
  LI = MF.getSubtarget().getLegalizerInfo();
  Builder.setMF(MF);
  IsPreLegalizer = !MF.getProperties().hasProperty(
      MachineFunctionProperties::Property::Legalized);
  InstsToErase.clear();
}

void LoadStoreOpt::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.setPreservesAll();
  getSelectionDAGFallbackAnalysisUsage(AU);
  MachineFunctionPass::getAnalysisUsage(AU);
}

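/// Decompose \p Ptr into a BaseIndexOffset. If \p Ptr is defined by a
/// G_PTR_ADD, the LHS becomes the base and the RHS is recorded as the index
/// and, when it is a known constant, as the byte offset. Otherwise \p Ptr
/// itself is used as the base with a zero offset.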
BaseIndexOffset GISelAddressing::getPointerInfo(Register Ptr,
                                                MachineRegisterInfo &MRI) {
  BaseIndexOffset Info;
  Register PtrAddRHS;
  Register BaseReg;
  if (!mi_match(Ptr, MRI, m_GPtrAdd(m_Reg(BaseReg), m_Reg(PtrAddRHS)))) {
    Info.setBase(Ptr);
    Info.setOffset(0);
    return Info;
  }
  Info.setBase(BaseReg);
  auto RHSCst = getIConstantVRegValWithLookThrough(PtrAddRHS, MRI);
  if (RHSCst)
    Info.setOffset(RHSCst->Value.getSExtValue());

  // Just recognize a simple case for now. In future we'll need to match
  // indexing patterns for base + index + constant.
  Info.setIndex(PtrAddRHS);
  return Info;
}

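/// Try to decide statically whether the memory accessed by \p MI1 and \p MI2
/// overlaps. Returns true and sets \p IsAlias when a definitive answer can be
/// given (e.g. same base with disjoint constant offsets, distinct frame
/// indices, or distinct global values); returns false when nothing can be
/// proven either way.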
bool GISelAddressing::aliasIsKnownForLoadStore(const MachineInstr &MI1,
                                               const MachineInstr &MI2,
                                               bool &IsAlias,
                                               MachineRegisterInfo &MRI) {
  auto *LdSt1 = dyn_cast<GLoadStore>(&MI1);
  auto *LdSt2 = dyn_cast<GLoadStore>(&MI2);
  if (!LdSt1 || !LdSt2)
    return false;

  BaseIndexOffset BasePtr0 = getPointerInfo(LdSt1->getPointerReg(), MRI);
  BaseIndexOffset BasePtr1 = getPointerInfo(LdSt2->getPointerReg(), MRI);

  if (!BasePtr0.getBase().isValid() || !BasePtr1.getBase().isValid())
    return false;

  LocationSize Size1 = LdSt1->getMemSize();
  LocationSize Size2 = LdSt2->getMemSize();

  int64_t PtrDiff;
  if (BasePtr0.getBase() == BasePtr1.getBase() && BasePtr0.hasValidOffset() &&
      BasePtr1.hasValidOffset()) {
    PtrDiff = BasePtr1.getOffset() - BasePtr0.getOffset();
    // If the size of a memory access is unknown, do not use it in the
    // analysis. One example of an unknown-size memory access is a load/store
    // of a scalable vector object on the stack.
    // BasePtr1 is PtrDiff away from BasePtr0. They alias if none of the
    // following situations arise:
    if (PtrDiff >= 0 && Size1.hasValue() && !Size1.isScalable()) {
      // [----BasePtr0----]
      //                         [---BasePtr1--]
      // ========PtrDiff========>
      IsAlias = !((int64_t)Size1.getValue() <= PtrDiff);
      return true;
    }
    if (PtrDiff < 0 && Size2.hasValue() && !Size2.isScalable()) {
      //                     [----BasePtr0----]
      // [---BasePtr1--]
      // =====(-PtrDiff)====>
      IsAlias = !((PtrDiff + (int64_t)Size2.getValue()) <= 0);
      return true;
    }
    return false;
  }

  // If both BasePtr0 and BasePtr1 are FrameIndexes, we will not be
  // able to calculate their relative offset if at least one arises
  // from an alloca. However, these allocas cannot overlap and we
  // can infer there is no alias.
  auto *Base0Def = getDefIgnoringCopies(BasePtr0.getBase(), MRI);
  auto *Base1Def = getDefIgnoringCopies(BasePtr1.getBase(), MRI);
  if (!Base0Def || !Base1Def)
    return false; // Couldn't tell anything.

  if (Base0Def->getOpcode() != Base1Def->getOpcode())
    return false;

  if (Base0Def->getOpcode() == TargetOpcode::G_FRAME_INDEX) {
    MachineFrameInfo &MFI = Base0Def->getMF()->getFrameInfo();
    // If the bases have the same frame index but we couldn't find a
    // constant offset (i.e. the indices are different), be conservative.
    if (Base0Def != Base1Def &&
        (!MFI.isFixedObjectIndex(Base0Def->getOperand(1).getIndex()) ||
         !MFI.isFixedObjectIndex(Base1Def->getOperand(1).getIndex()))) {
      IsAlias = false;
      return true;
    }
  }

  // This implementation is a lot more primitive than the SDAG one for now.
  // FIXME: what about constant pools?
  if (Base0Def->getOpcode() == TargetOpcode::G_GLOBAL_VALUE) {
    auto GV0 = Base0Def->getOperand(1).getGlobal();
    auto GV1 = Base1Def->getOperand(1).getGlobal();
    if (GV0 != GV1) {
      IsAlias = false;
      return true;
    }
  }

  // Can't tell anything about aliasing.
  return false;
}

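/// Returns true if \p MI and \p Other must be treated as potentially
/// aliasing: either they may access overlapping memory, or they are both
/// volatile/atomic and must not be reordered. Returns false only when the
/// two accesses can be proven disjoint, via the addressing analysis above or
/// via alias analysis.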
bool GISelAddressing::instMayAlias(const MachineInstr &MI,
                                   const MachineInstr &Other,
                                   MachineRegisterInfo &MRI,
                                   AliasAnalysis *AA) {
  struct MemUseCharacteristics {
    bool IsVolatile;
    bool IsAtomic;
    Register BasePtr;
    int64_t Offset;
    LocationSize NumBytes;
    MachineMemOperand *MMO;
  };

  auto getCharacteristics =
      [&](const MachineInstr *MI) -> MemUseCharacteristics {
    if (const auto *LS = dyn_cast<GLoadStore>(MI)) {
      Register BaseReg;
      int64_t Offset = 0;
      // No pre/post-inc addressing modes are considered here, unlike in SDAG.
      if (!mi_match(LS->getPointerReg(), MRI,
                    m_GPtrAdd(m_Reg(BaseReg), m_ICst(Offset)))) {
        BaseReg = LS->getPointerReg();
        Offset = 0;
      }

      LocationSize Size = LS->getMMO().getSize();
      return {LS->isVolatile(), LS->isAtomic(), BaseReg,
              Offset /*base offset*/, Size, &LS->getMMO()};
    }
    // FIXME: support recognizing lifetime instructions.
    // Default.
    return {false /*isvolatile*/,
            /*isAtomic*/ false,
            Register(),
            (int64_t)0 /*offset*/,
            LocationSize::beforeOrAfterPointer() /*size*/,
            (MachineMemOperand *)nullptr};
  };
  MemUseCharacteristics MUC0 = getCharacteristics(&MI),
                        MUC1 = getCharacteristics(&Other);

  // If they are to the same address, then they must be aliases.
  if (MUC0.BasePtr.isValid() && MUC0.BasePtr == MUC1.BasePtr &&
      MUC0.Offset == MUC1.Offset)
    return true;

  // If they are both volatile then they cannot be reordered.
  if (MUC0.IsVolatile && MUC1.IsVolatile)
    return true;

  // Be conservative about atomics for the moment
  // TODO: This is way overconservative for unordered atomics (see D66309)
  if (MUC0.IsAtomic && MUC1.IsAtomic)
    return true;

  // If one operation reads from invariant memory, and the other may store, they
  // cannot alias.
  if (MUC0.MMO && MUC1.MMO) {
    if ((MUC0.MMO->isInvariant() && MUC1.MMO->isStore()) ||
        (MUC1.MMO->isInvariant() && MUC0.MMO->isStore()))
      return false;
  }

  // If NumBytes is scalable and offset is not 0, conservatively return may
  // alias
  if ((MUC0.NumBytes.isScalable() && MUC0.Offset != 0) ||
      (MUC1.NumBytes.isScalable() && MUC1.Offset != 0))
    return true;

  const bool BothNotScalable =
      !MUC0.NumBytes.isScalable() && !MUC1.NumBytes.isScalable();

  // Try to prove that there is aliasing, or that there is no aliasing. Either
  // way, we can return now. If nothing can be proved, proceed with more tests.
  bool IsAlias;
  if (BothNotScalable &&
      GISelAddressing::aliasIsKnownForLoadStore(MI, Other, IsAlias, MRI))
    return IsAlias;

  // The following all rely on MMO0 and MMO1 being valid.
  if (!MUC0.MMO || !MUC1.MMO)
    return true;

  // FIXME: port the alignment based alias analysis from SDAG's isAlias().
  int64_t SrcValOffset0 = MUC0.MMO->getOffset();
  int64_t SrcValOffset1 = MUC1.MMO->getOffset();
  LocationSize Size0 = MUC0.NumBytes;
  LocationSize Size1 = MUC1.NumBytes;
  if (AA && MUC0.MMO->getValue() && MUC1.MMO->getValue() && Size0.hasValue() &&
      Size1.hasValue()) {
    // Use alias analysis information.
    int64_t MinOffset = std::min(SrcValOffset0, SrcValOffset1);
    int64_t Overlap0 =
        Size0.getValue().getKnownMinValue() + SrcValOffset0 - MinOffset;
    int64_t Overlap1 =
        Size1.getValue().getKnownMinValue() + SrcValOffset1 - MinOffset;
    LocationSize Loc0 =
        Size0.isScalable() ? Size0 : LocationSize::precise(Overlap0);
    LocationSize Loc1 =
        Size1.isScalable() ? Size1 : LocationSize::precise(Overlap1);

    if (AA->isNoAlias(
            MemoryLocation(MUC0.MMO->getValue(), Loc0, MUC0.MMO->getAAInfo()),
            MemoryLocation(MUC1.MMO->getValue(), Loc1, MUC1.MMO->getAAInfo())))
      return false;
  }

  // Otherwise we have to assume they alias.
  return true;
}

/// Returns true if the instruction creates an unavoidable hazard that
/// forces a boundary between store merge candidates.
static bool isInstHardMergeHazard(MachineInstr &MI) {
  return MI.hasUnmodeledSideEffects() || MI.hasOrderedMemoryRef();
}

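/// Try to merge the stores in \p StoresToMerge into as few wider stores as
/// the target's legal store sizes allow, consuming power-of-2 sized groups
/// from the front of the vector. Returns true if any merge was performed.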
bool LoadStoreOpt::mergeStores(SmallVectorImpl<GStore *> &StoresToMerge) {
  // Try to merge all the stores in the vector, splitting into separate segments
  // as necessary.
  assert(StoresToMerge.size() > 1 && "Expected multiple stores to merge");
  LLT OrigTy = MRI->getType(StoresToMerge[0]->getValueReg());
  LLT PtrTy = MRI->getType(StoresToMerge[0]->getPointerReg());
  unsigned AS = PtrTy.getAddressSpace();
  // Ensure the legal store info is computed for this address space.
  initializeStoreMergeTargetInfo(AS);
  const auto &LegalSizes = LegalStoreSizes[AS];

#ifndef NDEBUG
  for (auto *StoreMI : StoresToMerge)
    assert(MRI->getType(StoreMI->getValueReg()) == OrigTy);
#endif

  const auto &DL = MF->getFunction().getDataLayout();
  bool AnyMerged = false;
  do {
    unsigned NumPow2 = llvm::bit_floor(StoresToMerge.size());
    unsigned MaxSizeBits = NumPow2 * OrigTy.getSizeInBits().getFixedValue();
    // Compute the biggest store we can generate to handle the number of stores.
    unsigned MergeSizeBits;
    for (MergeSizeBits = MaxSizeBits; MergeSizeBits > 1; MergeSizeBits /= 2) {
      LLT StoreTy = LLT::scalar(MergeSizeBits);
      EVT StoreEVT =
          getApproximateEVTForLLT(StoreTy, DL, MF->getFunction().getContext());
      if (LegalSizes.size() > MergeSizeBits && LegalSizes[MergeSizeBits] &&
          TLI->canMergeStoresTo(AS, StoreEVT, *MF) &&
          (TLI->isTypeLegal(StoreEVT)))
        break; // We can generate a MergeSize bits store.
    }
    if (MergeSizeBits <= OrigTy.getSizeInBits())
      return AnyMerged; // No greater merge.

    unsigned NumStoresToMerge = MergeSizeBits / OrigTy.getSizeInBits();
    // Perform the actual merging.
    SmallVector<GStore *, 8> SingleMergeStores(
        StoresToMerge.begin(), StoresToMerge.begin() + NumStoresToMerge);
    AnyMerged |= doSingleStoreMerge(SingleMergeStores);
    StoresToMerge.erase(StoresToMerge.begin(),
                        StoresToMerge.begin() + NumStoresToMerge);
  } while (StoresToMerge.size() > 1);
  return AnyMerged;
}

bool LoadStoreOpt::isLegalOrBeforeLegalizer(const LegalityQuery &Query,
                                            MachineFunction &MF) const {
  auto Action = LI->getAction(Query).Action;
  // If the instruction is unsupported, it can't be legalized at all.
  if (Action == LegalizeActions::Unsupported)
    return false;
  return IsPreLegalizer || Action == LegalizeAction::Legal;
}

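/// Merge the group of adjacent stores in \p Stores (lowest address first)
/// into a single wide store, built at the position of the last store in the
/// group. Only all-constant values are handled for now; e.g. on a
/// little-endian target, four adjacent s8 stores of 0x01, 0x02, 0x03, 0x04
/// become one s32 store of 0x04030201. Returns true if the wide store was
/// created.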
bool LoadStoreOpt::doSingleStoreMerge(SmallVectorImpl<GStore *> &Stores) {
  assert(Stores.size() > 1);
  // We know that all the stores are consecutive and there are no aliasing
  // operations in the range. However, the values that are being stored may be
  // generated anywhere before each store. To ensure we have the values
  // available, we materialize the wide value and new store at the place of the
  // final store in the merge sequence.
  GStore *FirstStore = Stores[0];
  const unsigned NumStores = Stores.size();
  LLT SmallTy = MRI->getType(FirstStore->getValueReg());
  LLT WideValueTy =
      LLT::scalar(NumStores * SmallTy.getSizeInBits().getFixedValue());

  // For each store, compute pairwise merged debug locs.
  DebugLoc MergedLoc = Stores.front()->getDebugLoc();
  for (auto *Store : drop_begin(Stores))
    MergedLoc = DILocation::getMergedLocation(MergedLoc, Store->getDebugLoc());

  Builder.setInstr(*Stores.back());
  Builder.setDebugLoc(MergedLoc);

  // If all of the store values are constants, then create a wide constant
  // directly. Otherwise, we need to generate some instructions to merge the
  // existing values together into a wider type.
  SmallVector<APInt, 8> ConstantVals;
  for (auto *Store : Stores) {
    auto MaybeCst =
        getIConstantVRegValWithLookThrough(Store->getValueReg(), *MRI);
    if (!MaybeCst) {
      ConstantVals.clear();
      break;
    }
    ConstantVals.emplace_back(MaybeCst->Value);
  }

  Register WideReg;
  auto *WideMMO =
      MF->getMachineMemOperand(&FirstStore->getMMO(), 0, WideValueTy);
  if (ConstantVals.empty()) {
    // Mimic the SDAG behaviour here and don't try to do anything for unknown
    // values. In future, we should also support the cases of loads and
    // extracted vector elements.
    return false;
  }

  assert(ConstantVals.size() == NumStores);
  // Check if our wide constant is legal.
  if (!isLegalOrBeforeLegalizer({TargetOpcode::G_CONSTANT, {WideValueTy}}, *MF))
    return false;
  APInt WideConst(WideValueTy.getSizeInBits(), 0);
  for (unsigned Idx = 0; Idx < ConstantVals.size(); ++Idx) {
    // Insert the smaller constant into the corresponding position in the
    // wider one.
    WideConst.insertBits(ConstantVals[Idx], Idx * SmallTy.getSizeInBits());
  }
  WideReg = Builder.buildConstant(WideValueTy, WideConst).getReg(0);
  auto NewStore =
      Builder.buildStore(WideReg, FirstStore->getPointerReg(), *WideMMO);
  (void) NewStore;
  LLVM_DEBUG(dbgs() << "Merged " << Stores.size()
                    << " stores into merged store: " << *NewStore);
  LLVM_DEBUG(for (auto *MI : Stores) dbgs() << "  " << *MI;);
  NumStoresMerged += Stores.size();

  MachineOptimizationRemarkEmitter MORE(*MF, nullptr);
  MORE.emit([&]() {
    MachineOptimizationRemark R(DEBUG_TYPE, "MergedStore",
                                FirstStore->getDebugLoc(),
                                FirstStore->getParent());
    R << "Merged " << NV("NumMerged", Stores.size()) << " stores of "
      << NV("OrigWidth", SmallTy.getSizeInBytes())
      << " bytes into a single store of "
      << NV("NewWidth", WideValueTy.getSizeInBytes()) << " bytes";
    return R;
  });

  for (auto *MI : Stores)
    InstsToErase.insert(MI);
  return true;
}

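/// Verify that the stores accumulated in \p C are still safe to merge after
/// re-checking them against the aliasing operations recorded while the
/// candidate was being built, then hand the surviving stores to mergeStores().
/// Resets the candidate. Returns true if any stores were merged.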
bool LoadStoreOpt::processMergeCandidate(StoreMergeCandidate &C) {
  if (C.Stores.size() < 2) {
    C.reset();
    return false;
  }

  LLVM_DEBUG(dbgs() << "Checking store merge candidate with " << C.Stores.size()
                    << " stores, starting with " << *C.Stores[0]);
  // We know that the stores in the candidate are adjacent.
  // Now we need to check if any potential aliasing instructions recorded
  // during the search alias with load/stores added to the candidate after.
  // For example, if we have the candidate:
  //   C.Stores = [ST1, ST2, ST3, ST4]
  // and after seeing ST2 we saw a load LD1, which did not alias with ST1 or
  // ST2, then we would have recorded it into the PotentialAliases structure
  // with the associated index value of "1". Then we see ST3 and ST4 and add
  // them to the candidate group. We know that LD1 does not alias with ST1 or
  // ST2, since we already did that check. However we don't yet know if it
  // may alias ST3 and ST4, so we perform those checks now.
  SmallVector<GStore *> StoresToMerge;

  auto DoesStoreAliasWithPotential = [&](unsigned Idx, GStore &CheckStore) {
    for (auto AliasInfo : reverse(C.PotentialAliases)) {
      MachineInstr *PotentialAliasOp = AliasInfo.first;
      unsigned PreCheckedIdx = AliasInfo.second;
      if (static_cast<unsigned>(Idx) < PreCheckedIdx) {
        // Once our store index is lower than the index associated with the
        // potential alias, we know that we've already checked for this alias
        // and all of the earlier potential aliases too.
        return false;
      }
      // Need to check this alias.
      if (GISelAddressing::instMayAlias(CheckStore, *PotentialAliasOp, *MRI,
                                        AA)) {
        LLVM_DEBUG(dbgs() << "Potential alias " << *PotentialAliasOp
                          << " detected\n");
        return true;
      }
    }
    return false;
  };
  // Start from the last store in the group, and check if it aliases with any
  // of the potential aliasing operations in the list.
  for (int StoreIdx = C.Stores.size() - 1; StoreIdx >= 0; --StoreIdx) {
    auto *CheckStore = C.Stores[StoreIdx];
    if (DoesStoreAliasWithPotential(StoreIdx, *CheckStore))
      continue;
    StoresToMerge.emplace_back(CheckStore);
  }

  LLVM_DEBUG(dbgs() << StoresToMerge.size()
                    << " stores remaining after alias checks. Merging...\n");

  // Now we've checked for aliasing hazards, merge any stores left.
  C.reset();
  if (StoresToMerge.size() < 2)
    return false;
  return mergeStores(StoresToMerge);
}

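/// Returns true if \p MI may alias any of the stores already collected in the
/// candidate \p C.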
bool LoadStoreOpt::operationAliasesWithCandidate(MachineInstr &MI,
                                                 StoreMergeCandidate &C) {
  if (C.Stores.empty())
    return false;
  return llvm::any_of(C.Stores, [&](MachineInstr *OtherMI) {
    return instMayAlias(MI, *OtherMI, *MRI, AA);
  });
}

void LoadStoreOpt::StoreMergeCandidate::addPotentialAlias(MachineInstr &MI) {
  PotentialAliases.emplace_back(std::make_pair(&MI, Stores.size() - 1));
}

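/// Try to add \p StoreMI to the merge candidate \p C. Only simple,
/// non-truncating scalar stores are considered. If the candidate is empty the
/// store starts a new group; otherwise it must match the existing stores'
/// value size and address space and write to the address immediately below
/// the current lowest store in the group. Returns true if the store was
/// added.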
bool LoadStoreOpt::addStoreToCandidate(GStore &StoreMI,
                                       StoreMergeCandidate &C) {
  // Check if the given store writes to an adjacent address, and other
  // requirements.
  LLT ValueTy = MRI->getType(StoreMI.getValueReg());
  LLT PtrTy = MRI->getType(StoreMI.getPointerReg());

  // Only handle scalars.
  if (!ValueTy.isScalar())
    return false;

  // Don't allow truncating stores for now.
  if (StoreMI.getMemSizeInBits() != ValueTy.getSizeInBits())
    return false;

  // Avoid adding volatile or ordered stores to the candidate. We already have a
  // check for this in instMayAlias() but that only gets called later between
  // potential aliasing hazards.
  if (!StoreMI.isSimple())
    return false;

  Register StoreAddr = StoreMI.getPointerReg();
  auto BIO = getPointerInfo(StoreAddr, *MRI);
  Register StoreBase = BIO.getBase();
  if (C.Stores.empty()) {
    C.BasePtr = StoreBase;
    if (!BIO.hasValidOffset()) {
      C.CurrentLowestOffset = 0;
    } else {
      C.CurrentLowestOffset = BIO.getOffset();
    }
    // This is the first store of the candidate.
    // If the offset can't possibly allow for a lower addressed store with the
    // same base, don't bother adding it.
    if (BIO.hasValidOffset() &&
        BIO.getOffset() < static_cast<int64_t>(ValueTy.getSizeInBytes()))
      return false;
    C.Stores.emplace_back(&StoreMI);
    LLVM_DEBUG(dbgs() << "Starting a new merge candidate group with: "
                      << StoreMI);
    return true;
  }

  // Check the store is the same size as the existing ones in the candidate.
  if (MRI->getType(C.Stores[0]->getValueReg()).getSizeInBits() !=
      ValueTy.getSizeInBits())
    return false;

  if (MRI->getType(C.Stores[0]->getPointerReg()).getAddressSpace() !=
      PtrTy.getAddressSpace())
    return false;

  // There are other stores in the candidate. Check that the store address
  // writes to the next lowest adjacent address.
  if (C.BasePtr != StoreBase)
    return false;
  // If we don't have a valid offset, we can't guarantee that this store is
  // adjacent to the others.
  if (!BIO.hasValidOffset())
    return false;
  if ((C.CurrentLowestOffset -
       static_cast<int64_t>(ValueTy.getSizeInBytes())) != BIO.getOffset())
    return false;

  // This writes to an adjacent address. Allow it.
  C.Stores.emplace_back(&StoreMI);
  C.CurrentLowestOffset = C.CurrentLowestOffset - ValueTy.getSizeInBytes();
  LLVM_DEBUG(dbgs() << "Candidate added store: " << StoreMI);
  return true;
}

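/// Walk \p MBB bottom-up, grouping adjacent G_STOREs into merge candidates
/// and flushing the current candidate whenever an aliasing operation or a
/// hard hazard (e.g. an instruction with unmodeled side effects) is reached.
/// Returns true if any stores in the block were merged.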
bool LoadStoreOpt::mergeBlockStores(MachineBasicBlock &MBB) {
  bool Changed = false;
  // Walk through the block bottom-up, looking for merging candidates.
  StoreMergeCandidate Candidate;
  for (MachineInstr &MI : llvm::reverse(MBB)) {
    if (InstsToErase.contains(&MI))
      continue;

    if (auto *StoreMI = dyn_cast<GStore>(&MI)) {
      // We have a G_STORE. Add it to the candidate if it writes to an adjacent
      // address.
      if (!addStoreToCandidate(*StoreMI, Candidate)) {
        // Store wasn't eligible to be added. May need to record it as a
        // potential alias.
        if (operationAliasesWithCandidate(*StoreMI, Candidate)) {
          Changed |= processMergeCandidate(Candidate);
          continue;
        }
        Candidate.addPotentialAlias(*StoreMI);
      }
      continue;
    }

    // If we don't have any stores yet, this instruction can't pose a problem.
    if (Candidate.Stores.empty())
      continue;

    // We're dealing with some other kind of instruction.
    if (isInstHardMergeHazard(MI)) {
      Changed |= processMergeCandidate(Candidate);
      Candidate.Stores.clear();
      continue;
    }

    if (!MI.mayLoadOrStore())
      continue;

    if (operationAliasesWithCandidate(MI, Candidate)) {
      // We have a potential alias, so process the current candidate if we can
      // and then continue looking for a new candidate.
      Changed |= processMergeCandidate(Candidate);
      continue;
    }

    // Record this instruction as a potential alias for future stores that are
    // added to the candidate.
    Candidate.addPotentialAlias(MI);
  }

  // Process any candidate left after finishing searching the entire block.
  Changed |= processMergeCandidate(Candidate);

  // Erase instructions now that we're no longer iterating over the block.
  for (auto *MI : InstsToErase)
    MI->eraseFromParent();
  InstsToErase.clear();
  return Changed;
}

/// Check if the store \p Store is a truncstore that can be merged. That is,
/// it's a store of a shifted value of \p SrcVal. If \p SrcVal is an empty
/// Register then it does not need to match and SrcVal is set to the source
/// value found.
/// On match, returns the start byte offset of the \p SrcVal that is being
/// stored.
static std::optional<int64_t>
getTruncStoreByteOffset(GStore &Store, Register &SrcVal,
                        MachineRegisterInfo &MRI) {
  Register TruncVal;
  if (!mi_match(Store.getValueReg(), MRI, m_GTrunc(m_Reg(TruncVal))))
    return std::nullopt;

  // The shift amount must be a constant multiple of the narrow type.
  // It is translated to the offset address in the wide source value "y".
  //
  // x = G_LSHR y, ShiftAmtC
  // s8 z = G_TRUNC x
  // store z, ...
  Register FoundSrcVal;
  int64_t ShiftAmt;
  if (!mi_match(TruncVal, MRI,
                m_any_of(m_GLShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt)),
                         m_GAShr(m_Reg(FoundSrcVal), m_ICst(ShiftAmt))))) {
    if (!SrcVal.isValid() || TruncVal == SrcVal) {
      if (!SrcVal.isValid())
        SrcVal = TruncVal;
      return 0; // If it's the lowest index store.
    }
    return std::nullopt;
  }

  unsigned NarrowBits = Store.getMMO().getMemoryType().getScalarSizeInBits();
  if (ShiftAmt % NarrowBits != 0)
    return std::nullopt;
  const unsigned Offset = ShiftAmt / NarrowBits;

  if (SrcVal.isValid() && FoundSrcVal != SrcVal)
    return std::nullopt;

  if (!SrcVal.isValid())
    SrcVal = FoundSrcVal;
  else if (MRI.getType(SrcVal) != MRI.getType(FoundSrcVal))
    return std::nullopt;
  return Offset;
}

/// Match a pattern where a wide type scalar value is stored by several narrow
/// stores. Fold it into a single store or a BSWAP and a store if the target
/// supports it.
///
/// Assuming little endian target:
///  i8 *p = ...
///  i32 val = ...
///  p[0] = (val >> 0) & 0xFF;
///  p[1] = (val >> 8) & 0xFF;
///  p[2] = (val >> 16) & 0xFF;
///  p[3] = (val >> 24) & 0xFF;
/// =>
///  *((i32)p) = val;
///
///  i8 *p = ...
///  i32 val = ...
///  p[0] = (val >> 24) & 0xFF;
///  p[1] = (val >> 16) & 0xFF;
///  p[2] = (val >> 8) & 0xFF;
///  p[3] = (val >> 0) & 0xFF;
/// =>
///  *((i32)p) = BSWAP(val);
bool LoadStoreOpt::mergeTruncStore(GStore &StoreMI,
                                   SmallPtrSetImpl<GStore *> &DeletedStores) {
  LLT MemTy = StoreMI.getMMO().getMemoryType();

  // We only handle merging simple stores of 1-4 bytes.
  if (!MemTy.isScalar())
    return false;
  switch (MemTy.getSizeInBits()) {
  case 8:
  case 16:
  case 32:
    break;
  default:
    return false;
  }
  if (!StoreMI.isSimple())
    return false;

  // We do a simple search for mergeable stores prior to this one.
  // Any potential alias hazard along the way terminates the search.
  SmallVector<GStore *> FoundStores;

  // We're looking for:
  // 1) a (store(trunc(...)))
  // 2) of an LSHR/ASHR of a single wide value, by the appropriate shift to get
  //    the partial value stored.
  // 3) where the offsets form either a little or big-endian sequence.

  auto &LastStore = StoreMI;

  // The single base pointer that all stores must use.
  Register BaseReg;
  int64_t LastOffset;
  if (!mi_match(LastStore.getPointerReg(), *MRI,
                m_GPtrAdd(m_Reg(BaseReg), m_ICst(LastOffset)))) {
    BaseReg = LastStore.getPointerReg();
    LastOffset = 0;
  }

  GStore *LowestIdxStore = &LastStore;
  int64_t LowestIdxOffset = LastOffset;

  Register WideSrcVal;
  auto LowestShiftAmt = getTruncStoreByteOffset(LastStore, WideSrcVal, *MRI);
  if (!LowestShiftAmt)
    return false; // Didn't match a trunc.
  assert(WideSrcVal.isValid());

  LLT WideStoreTy = MRI->getType(WideSrcVal);
  // The wide type might not be a multiple of the memory type, e.g. s48 and s32.
  if (WideStoreTy.getSizeInBits() % MemTy.getSizeInBits() != 0)
    return false;
  const unsigned NumStoresRequired =
      WideStoreTy.getSizeInBits() / MemTy.getSizeInBits();

  SmallVector<int64_t, 8> OffsetMap(NumStoresRequired, INT64_MAX);
  OffsetMap[*LowestShiftAmt] = LastOffset;
  FoundStores.emplace_back(&LastStore);

  const int MaxInstsToCheck = 10;
  int NumInstsChecked = 0;
  for (auto II = ++LastStore.getReverseIterator();
       II != LastStore.getParent()->rend() && NumInstsChecked < MaxInstsToCheck;
       ++II) {
    NumInstsChecked++;
    GStore *NewStore;
    if ((NewStore = dyn_cast<GStore>(&*II))) {
      if (NewStore->getMMO().getMemoryType() != MemTy || !NewStore->isSimple())
        break;
    } else if (II->isLoadFoldBarrier() || II->mayLoad()) {
      break;
    } else {
      continue; // This is a safe instruction we can look past.
    }

    Register NewBaseReg;
    int64_t MemOffset;
    // Check we're storing to the same base + some offset.
    if (!mi_match(NewStore->getPointerReg(), *MRI,
                  m_GPtrAdd(m_Reg(NewBaseReg), m_ICst(MemOffset)))) {
      NewBaseReg = NewStore->getPointerReg();
      MemOffset = 0;
    }
    if (BaseReg != NewBaseReg)
      break;

    auto ShiftByteOffset = getTruncStoreByteOffset(*NewStore, WideSrcVal, *MRI);
    if (!ShiftByteOffset)
      break;
    if (MemOffset < LowestIdxOffset) {
      LowestIdxOffset = MemOffset;
      LowestIdxStore = NewStore;
    }

    // Map the offset in the store and the offset in the combined value, and
    // early return if it has been set before.
    if (*ShiftByteOffset < 0 || *ShiftByteOffset >= NumStoresRequired ||
        OffsetMap[*ShiftByteOffset] != INT64_MAX)
      break;
    OffsetMap[*ShiftByteOffset] = MemOffset;

    FoundStores.emplace_back(NewStore);
    // Reset counter since we've found a matching inst.
    NumInstsChecked = 0;
    if (FoundStores.size() == NumStoresRequired)
      break;
  }

  if (FoundStores.size() != NumStoresRequired) {
    if (FoundStores.size() == 1)
      return false;
    // We didn't find enough stores to merge into the size of the original
    // source value, but we may be able to generate a smaller store if we
    // truncate the source value.
    WideStoreTy = LLT::scalar(FoundStores.size() * MemTy.getScalarSizeInBits());
  }

  unsigned NumStoresFound = FoundStores.size();

  const auto &DL = LastStore.getMF()->getDataLayout();
  auto &C = LastStore.getMF()->getFunction().getContext();
  // Check that a store of the wide type is both allowed and fast on the target
  unsigned Fast = 0;
  bool Allowed = TLI->allowsMemoryAccess(
      C, DL, WideStoreTy, LowestIdxStore->getMMO(), &Fast);
  if (!Allowed || !Fast)
    return false;

  // Check if the pieces of the value are going to the expected places in memory
  // to merge the stores.
  unsigned NarrowBits = MemTy.getScalarSizeInBits();
  auto checkOffsets = [&](bool MatchLittleEndian) {
    if (MatchLittleEndian) {
      for (unsigned i = 0; i != NumStoresFound; ++i)
        if (OffsetMap[i] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    } else { // MatchBigEndian by reversing loop counter.
      for (unsigned i = 0, j = NumStoresFound - 1; i != NumStoresFound;
           ++i, --j)
        if (OffsetMap[j] != i * (NarrowBits / 8) + LowestIdxOffset)
          return false;
    }
    return true;
  };

  // Check if the offsets line up for the native data layout of this target.
  bool NeedBswap = false;
  bool NeedRotate = false;
  if (!checkOffsets(DL.isLittleEndian())) {
    // Special-case: check if byte offsets line up for the opposite endian.
    if (NarrowBits == 8 && checkOffsets(DL.isBigEndian()))
      NeedBswap = true;
    else if (NumStoresFound == 2 && checkOffsets(DL.isBigEndian()))
      NeedRotate = true;
    else
      return false;
  }

  if (NeedBswap &&
      !isLegalOrBeforeLegalizer({TargetOpcode::G_BSWAP, {WideStoreTy}}, *MF))
    return false;
  if (NeedRotate &&
      !isLegalOrBeforeLegalizer(
          {TargetOpcode::G_ROTR, {WideStoreTy, WideStoreTy}}, *MF))
    return false;

  Builder.setInstrAndDebugLoc(StoreMI);

  if (WideStoreTy != MRI->getType(WideSrcVal))
    WideSrcVal = Builder.buildTrunc(WideStoreTy, WideSrcVal).getReg(0);

  if (NeedBswap) {
    WideSrcVal = Builder.buildBSwap(WideStoreTy, WideSrcVal).getReg(0);
  } else if (NeedRotate) {
    assert(WideStoreTy.getSizeInBits() % 2 == 0 &&
           "Unexpected type for rotate");
    auto RotAmt =
        Builder.buildConstant(WideStoreTy, WideStoreTy.getSizeInBits() / 2);
    WideSrcVal =
        Builder.buildRotateRight(WideStoreTy, WideSrcVal, RotAmt).getReg(0);
  }

  Builder.buildStore(WideSrcVal, LowestIdxStore->getPointerReg(),
                     LowestIdxStore->getMMO().getPointerInfo(),
                     LowestIdxStore->getMMO().getAlign());

  // Erase the old stores.
  for (auto *ST : FoundStores) {
    ST->eraseFromParent();
    DeletedStores.insert(ST);
  }
  return true;
}

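/// Scan \p BB bottom-up and try to apply mergeTruncStore() to each G_STORE
/// that has not already been deleted by an earlier merge. Returns true if any
/// truncstore sequence in the block was folded.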
bool LoadStoreOpt::mergeTruncStoresBlock(MachineBasicBlock &BB) {
  bool Changed = false;
  SmallVector<GStore *, 16> Stores;
  SmallPtrSet<GStore *, 8> DeletedStores;
  // Walk up the block so we can see the most eligible stores.
  for (MachineInstr &MI : llvm::reverse(BB))
    if (auto *StoreMI = dyn_cast<GStore>(&MI))
      Stores.emplace_back(StoreMI);

  for (auto *StoreMI : Stores) {
    if (DeletedStores.count(StoreMI))
      continue;
    if (mergeTruncStore(*StoreMI, DeletedStores))
      Changed = true;
  }
  return Changed;
}

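/// Run both store-merging transformations over every block of \p MF, then
/// delete any instructions left trivially dead by the rewrites. Returns true
/// if the function was changed.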
bool LoadStoreOpt::mergeFunctionStores(MachineFunction &MF) {
  bool Changed = false;
  for (auto &BB : MF) {
    Changed |= mergeBlockStores(BB);
    Changed |= mergeTruncStoresBlock(BB);
  }

  // Erase all dead instructions left over by the merging.
  if (Changed) {
    for (auto &BB : MF) {
      for (auto &I : make_early_inc_range(make_range(BB.rbegin(), BB.rend()))) {
        if (isTriviallyDead(I, *MRI))
          I.eraseFromParent();
      }
    }
  }

  return Changed;
}

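/// Lazily populate LegalStoreSizes[\p AddrSpace] with a bit vector marking
/// which power-of-2 scalar store widths (up to MaxStoreSizeToForm bits) are
/// legal in address space \p AddrSpace according to the legalizer rules.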
void LoadStoreOpt::initializeStoreMergeTargetInfo(unsigned AddrSpace) {
  // Query the legalizer info to record what store types are legal.
  // We record this because we don't want to bother trying to merge stores into
  // illegal ones, which would just result in being split again.

  if (LegalStoreSizes.count(AddrSpace)) {
    assert(LegalStoreSizes[AddrSpace].any());
    return; // Already cached sizes for this address space.
  }

  // Need to reserve at least MaxStoreSizeToForm + 1 bits.
  BitVector LegalSizes(MaxStoreSizeToForm * 2);
  const auto &LI = *MF->getSubtarget().getLegalizerInfo();
  const auto &DL = MF->getFunction().getDataLayout();
  Type *IRPtrTy = PointerType::get(MF->getFunction().getContext(), AddrSpace);
  LLT PtrTy = getLLTForType(*IRPtrTy, DL);
  // We assume that we're not going to be generating any stores wider than
  // MaxStoreSizeToForm bits for now.
  for (unsigned Size = 2; Size <= MaxStoreSizeToForm; Size *= 2) {
    LLT Ty = LLT::scalar(Size);
    SmallVector<LegalityQuery::MemDesc, 2> MemDescrs(
        {{Ty, Ty.getSizeInBits(), AtomicOrdering::NotAtomic}});
    SmallVector<LLT> StoreTys({Ty, PtrTy});
    LegalityQuery Q(TargetOpcode::G_STORE, StoreTys, MemDescrs);
    LegalizeActionStep ActionStep = LI.getAction(Q);
    if (ActionStep.Action == LegalizeActions::Legal)
      LegalSizes.set(Size);
  }
  assert(LegalSizes.any() && "Expected some store sizes to be legal!");
  LegalStoreSizes[AddrSpace] = LegalSizes;
}

bool LoadStoreOpt::runOnMachineFunction(MachineFunction &MF) {
  // If the ISel pipeline failed, do not bother running this pass.
  if (MF.getProperties().hasProperty(
          MachineFunctionProperties::Property::FailedISel))
    return false;

  LLVM_DEBUG(dbgs() << "Begin memory optimizations for: " << MF.getName()
                    << '\n');

  init(MF);
  bool Changed = false;
  Changed |= mergeFunctionStores(MF);

  LegalStoreSizes.clear();
  return Changed;
}