//=- AArch64MachineFunctionInfo.h - AArch64 machine function info -*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file declares AArch64-specific per-machine-function information.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H
#define LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MIRYamlMapping.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/IR/Function.h"
#include "llvm/MC/MCLinkerOptimizationHint.h"
#include <cassert>

namespace llvm {

namespace yaml {
struct AArch64FunctionInfo;
} // end namespace yaml

class MachineInstr;

/// AArch64FunctionInfo - This class is derived from MachineFunctionInfo and
/// contains private AArch64-specific information for each MachineFunction.
class AArch64FunctionInfo final : public MachineFunctionInfo {
  /// Backreference to the machine function.
  MachineFunction *MF;

  /// Number of bytes of arguments this function has on the stack. If the
  /// callee is expected to restore the argument stack this should be a
  /// multiple of 16, all usable during a tail call.
  ///
  /// The alternative would forbid tail call optimisation in some cases: if we
  /// want to transfer control from a function with 8-bytes of stack-argument
  /// space to a function with 16-bytes then misalignment of this value would
  /// make a stack adjustment necessary, which could not be undone by the
  /// callee.
  unsigned BytesInStackArgArea = 0;

  /// The number of bytes to restore to deallocate space for incoming
  /// arguments. Canonically 0 in the C calling convention, but non-zero when
  /// the callee is expected to pop the args.
  unsigned ArgumentStackToRestore = 0;

  /// Space just below incoming stack pointer reserved for arguments being
  /// passed on the stack during a tail call. This will be the difference
  /// between the largest tail call argument space needed in this function and
  /// what's already available by reusing space of incoming arguments.
  unsigned TailCallReservedStack = 0;

  /// HasStackFrame - True if this function has a stack frame. Set by
  /// determineCalleeSaves().
  bool HasStackFrame = false;

  /// Amount of stack frame size, not including callee-saved registers.
  uint64_t LocalStackSize = 0;

  /// The start and end frame indices for the SVE callee saves.
  int MinSVECSFrameIndex = 0;
  int MaxSVECSFrameIndex = 0;

  /// Amount of stack frame size used for saving callee-saved registers.
  unsigned CalleeSavedStackSize = 0;
  unsigned SVECalleeSavedStackSize = 0;
  bool HasCalleeSavedStackSize = false;

  /// Number of TLS accesses using the special (combinable)
  /// _TLS_MODULE_BASE_ symbol.
  unsigned NumLocalDynamicTLSAccesses = 0;

  /// FrameIndex for start of varargs area for arguments passed on the
  /// stack.
  int VarArgsStackIndex = 0;
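
  // Note (assumption, based on AAPCS64 varargs handling): the GPR/FPR varargs
  // fields below generally describe the register save areas into which
  // unnamed arguments still held in x0-x7 and q0-q7 are spilled when va_start
  // is lowered; their sizes remain zero when no such save area is needed.
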
  /// FrameIndex for start of varargs area for arguments passed in
  /// general purpose registers.
  int VarArgsGPRIndex = 0;

  /// Size of the varargs area for arguments passed in general purpose
  /// registers.
  unsigned VarArgsGPRSize = 0;

  /// FrameIndex for start of varargs area for arguments passed in
  /// floating-point registers.
  int VarArgsFPRIndex = 0;

  /// Size of the varargs area for arguments passed in floating-point
  /// registers.
  unsigned VarArgsFPRSize = 0;

  /// True if this function has a subset of CSRs that is handled explicitly via
  /// copies.
  bool IsSplitCSR = false;

  /// True when the stack gets realigned dynamically because the size of the
  /// stack frame is unknown at compile time, e.g. in the case of VLAs.
  bool StackRealigned = false;

  /// True when the callee-save stack area has unused gaps that may be used for
  /// other stack allocations.
  bool CalleeSaveStackHasFreeSpace = false;

  /// SRetReturnReg - sret lowering includes returning the value of the
  /// returned struct in a register. This field holds the virtual register into
  /// which the sret argument is passed.
  Register SRetReturnReg;

  /// The SVE stack size (for predicates and data vectors) is maintained here
  /// rather than in FrameInfo, as the placement and Stack IDs are target
  /// specific.
  uint64_t StackSizeSVE = 0;

  /// HasCalculatedStackSizeSVE indicates whether StackSizeSVE is valid.
  bool HasCalculatedStackSizeSVE = false;

  /// Has a value when it is known whether or not the function uses a
  /// redzone, and no value otherwise.
  /// Initialized during frame lowering, unless the function has the noredzone
  /// attribute, in which case it is set to false at construction.
  Optional<bool> HasRedZone;

  /// ForwardedMustTailRegParms - A list of virtual and physical registers
  /// that must be forwarded to every musttail call.
  SmallVector<ForwardedRegister, 1> ForwardedMustTailRegParms;

  /// FrameIndex for the tagged base pointer.
  Optional<int> TaggedBasePointerIndex;

  /// Offset from SP-at-entry to the tagged base pointer.
  /// The tagged base pointer is set up to point to the first (lowest address)
  /// tagged stack slot.
  unsigned TaggedBasePointerOffset;

  /// OutliningStyle denotes, if a function was outlined, how it was outlined,
  /// e.g. Tail Call, Thunk, or Function if none apply.
  Optional<std::string> OutliningStyle;

  // Offset from SP-after-callee-saved-spills (i.e. SP-at-entry minus
  // CalleeSavedStackSize) to the address of the frame record.
  int CalleeSaveBaseToFrameRecordOffset = 0;

  /// SignReturnAddress is true if PAC-RET is enabled for the function, with
  /// the defaults being sign non-leaf functions only, with the A key.
  bool SignReturnAddress = false;

  /// SignReturnAddressAll modifies the default PAC-RET mode to signing leaf
  /// functions as well.
  bool SignReturnAddressAll = false;

  /// SignWithBKey modifies the default PAC-RET mode to signing with the B key.
  bool SignWithBKey = false;

  /// BranchTargetEnforcement enables placing BTI instructions at potential
  /// indirect branch destinations.
  bool BranchTargetEnforcement = false;
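
  // Note (assumption about how the PAC-RET/BTI fields above are populated):
  // they are normally derived from IR function attributes such as
  // "sign-return-address" ("none"/"non-leaf"/"all"),
  // "sign-return-address-key" ("a_key"/"b_key") and
  // "branch-target-enforcement". For example, a function carrying
  //   "sign-return-address"="non-leaf" "sign-return-address-key"="b_key"
  // would end up with SignReturnAddress and SignWithBKey set.
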
  /// Whether this function has an extended frame record [Ctx, FP, LR]. If so,
  /// bit 60 of the in-memory FP will be 1 to enable other tools to detect the
  /// extended record.
  bool HasSwiftAsyncContext = false;

  /// The stack slot where the Swift asynchronous context is stored.
  int SwiftAsyncContextFrameIdx = std::numeric_limits<int>::max();

  bool IsMTETagged = false;

  /// True if the function has a Scalable Vector or Scalable Predicate
  /// register argument or return type.
  bool IsSVECC = false;

  /// True if the function needs unwind information.
  mutable Optional<bool> NeedsDwarfUnwindInfo;

  /// True if the function needs asynchronous unwind information.
  mutable Optional<bool> NeedsAsyncDwarfUnwindInfo;

public:
  explicit AArch64FunctionInfo(MachineFunction &MF);

  MachineFunctionInfo *
  clone(BumpPtrAllocator &Allocator, MachineFunction &DestMF,
        const DenseMap<MachineBasicBlock *, MachineBasicBlock *> &Src2DstMBB)
      const override;

  bool isSVECC() const { return IsSVECC; }
  void setIsSVECC(bool s) { IsSVECC = s; }

  void initializeBaseYamlFields(const yaml::AArch64FunctionInfo &YamlMFI);

  unsigned getBytesInStackArgArea() const { return BytesInStackArgArea; }
  void setBytesInStackArgArea(unsigned bytes) { BytesInStackArgArea = bytes; }

  unsigned getArgumentStackToRestore() const { return ArgumentStackToRestore; }
  void setArgumentStackToRestore(unsigned bytes) {
    ArgumentStackToRestore = bytes;
  }

  unsigned getTailCallReservedStack() const { return TailCallReservedStack; }
  void setTailCallReservedStack(unsigned bytes) {
    TailCallReservedStack = bytes;
  }

  bool hasCalculatedStackSizeSVE() const { return HasCalculatedStackSizeSVE; }

  void setStackSizeSVE(uint64_t S) {
    HasCalculatedStackSizeSVE = true;
    StackSizeSVE = S;
  }

  uint64_t getStackSizeSVE() const { return StackSizeSVE; }

  bool hasStackFrame() const { return HasStackFrame; }
  void setHasStackFrame(bool s) { HasStackFrame = s; }

  bool isStackRealigned() const { return StackRealigned; }
  void setStackRealigned(bool s) { StackRealigned = s; }

  bool hasCalleeSaveStackFreeSpace() const {
    return CalleeSaveStackHasFreeSpace;
  }
  void setCalleeSaveStackHasFreeSpace(bool s) {
    CalleeSaveStackHasFreeSpace = s;
  }
  bool isSplitCSR() const { return IsSplitCSR; }
  void setIsSplitCSR(bool s) { IsSplitCSR = s; }

  void setLocalStackSize(uint64_t Size) { LocalStackSize = Size; }
  uint64_t getLocalStackSize() const { return LocalStackSize; }

  void setOutliningStyle(std::string Style) { OutliningStyle = Style; }
  Optional<std::string> getOutliningStyle() const { return OutliningStyle; }

  void setCalleeSavedStackSize(unsigned Size) {
    CalleeSavedStackSize = Size;
    HasCalleeSavedStackSize = true;
  }
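
  // Worked example for the recomputation below (the offsets are hypothetical):
  // with two 8-byte callee-save slots at frame offsets -16 and -8, MinOffset
  // is -16 and MaxOffset is 0, so the recomputed size is
  // alignTo(0 - (-16), 16) == 16 bytes.
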
  // When CalleeSavedStackSize has not been set (for example when
  // some MachineIR pass is run in isolation), then recalculate
  // the CalleeSavedStackSize directly from the CalleeSavedInfo.
  // Note: This information can only be recalculated after PEI
  // has assigned offsets to the callee save objects.
  unsigned getCalleeSavedStackSize(const MachineFrameInfo &MFI) const {
    bool ValidateCalleeSavedStackSize = false;

#ifndef NDEBUG
    // Make sure the calculated size derived from the CalleeSavedInfo
    // equals the cached size that was calculated elsewhere (e.g. in
    // determineCalleeSaves).
    ValidateCalleeSavedStackSize = HasCalleeSavedStackSize;
#endif

    if (!HasCalleeSavedStackSize || ValidateCalleeSavedStackSize) {
      assert(MFI.isCalleeSavedInfoValid() && "CalleeSavedInfo not calculated");
      if (MFI.getCalleeSavedInfo().empty())
        return 0;

      int64_t MinOffset = std::numeric_limits<int64_t>::max();
      int64_t MaxOffset = std::numeric_limits<int64_t>::min();
      for (const auto &Info : MFI.getCalleeSavedInfo()) {
        int FrameIdx = Info.getFrameIdx();
        if (MFI.getStackID(FrameIdx) != TargetStackID::Default)
          continue;
        int64_t Offset = MFI.getObjectOffset(FrameIdx);
        int64_t ObjSize = MFI.getObjectSize(FrameIdx);
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      if (SwiftAsyncContextFrameIdx != std::numeric_limits<int>::max()) {
        int64_t Offset = MFI.getObjectOffset(getSwiftAsyncContextFrameIdx());
        int64_t ObjSize = MFI.getObjectSize(getSwiftAsyncContextFrameIdx());
        MinOffset = std::min<int64_t>(Offset, MinOffset);
        MaxOffset = std::max<int64_t>(Offset + ObjSize, MaxOffset);
      }

      unsigned Size = alignTo(MaxOffset - MinOffset, 16);
      assert((!HasCalleeSavedStackSize || getCalleeSavedStackSize() == Size) &&
             "Invalid size calculated for callee saves");
      return Size;
    }

    return getCalleeSavedStackSize();
  }

  unsigned getCalleeSavedStackSize() const {
    assert(HasCalleeSavedStackSize &&
           "CalleeSavedStackSize has not been calculated");
    return CalleeSavedStackSize;
  }

  // Saves the CalleeSavedStackSize for SVE vectors in 'scalable bytes'.
  void setSVECalleeSavedStackSize(unsigned Size) {
    SVECalleeSavedStackSize = Size;
  }
  unsigned getSVECalleeSavedStackSize() const {
    return SVECalleeSavedStackSize;
  }

  void setMinMaxSVECSFrameIndex(int Min, int Max) {
    MinSVECSFrameIndex = Min;
    MaxSVECSFrameIndex = Max;
  }

  int getMinSVECSFrameIndex() const { return MinSVECSFrameIndex; }
  int getMaxSVECSFrameIndex() const { return MaxSVECSFrameIndex; }

  void incNumLocalDynamicTLSAccesses() { ++NumLocalDynamicTLSAccesses; }
  unsigned getNumLocalDynamicTLSAccesses() const {
    return NumLocalDynamicTLSAccesses;
  }

  Optional<bool> hasRedZone() const { return HasRedZone; }
  void setHasRedZone(bool s) { HasRedZone = s; }

  int getVarArgsStackIndex() const { return VarArgsStackIndex; }
  void setVarArgsStackIndex(int Index) { VarArgsStackIndex = Index; }

  int getVarArgsGPRIndex() const { return VarArgsGPRIndex; }
  void setVarArgsGPRIndex(int Index) { VarArgsGPRIndex = Index; }

  unsigned getVarArgsGPRSize() const { return VarArgsGPRSize; }
  void setVarArgsGPRSize(unsigned Size) { VarArgsGPRSize = Size; }

  int getVarArgsFPRIndex() const { return VarArgsFPRIndex; }
  void setVarArgsFPRIndex(int Index) { VarArgsFPRIndex = Index; }

  unsigned getVarArgsFPRSize() const { return VarArgsFPRSize; }
  void setVarArgsFPRSize(unsigned Size) { VarArgsFPRSize = Size; }

  unsigned getSRetReturnReg() const { return SRetReturnReg; }
  void setSRetReturnReg(unsigned Reg) { SRetReturnReg = Reg; }
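
  // Note (assumption about how the jump-table info below is consumed): the
  // recorded entry Size is expected to be 4 bytes by default and 1 or 2 bytes
  // when jump-table compression applies, with PCRelSym acting as the anchor
  // symbol that the compressed entries are encoded relative to.
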
  unsigned getJumpTableEntrySize(int Idx) const {
    return JumpTableEntryInfo[Idx].first;
  }
  MCSymbol *getJumpTableEntryPCRelSymbol(int Idx) const {
    return JumpTableEntryInfo[Idx].second;
  }
  void setJumpTableEntryInfo(int Idx, unsigned Size, MCSymbol *PCRelSym) {
    if ((unsigned)Idx >= JumpTableEntryInfo.size())
      JumpTableEntryInfo.resize(Idx + 1);
    JumpTableEntryInfo[Idx] = std::make_pair(Size, PCRelSym);
  }

  using SetOfInstructions = SmallPtrSet<const MachineInstr *, 16>;

  const SetOfInstructions &getLOHRelated() const { return LOHRelated; }

  // Shortcuts for LOH related types.
  class MILOHDirective {
    MCLOHType Kind;

    /// Arguments of this directive. Order matters.
    SmallVector<const MachineInstr *, 3> Args;

  public:
    using LOHArgs = ArrayRef<const MachineInstr *>;

    MILOHDirective(MCLOHType Kind, LOHArgs Args)
        : Kind(Kind), Args(Args.begin(), Args.end()) {
      assert(isValidMCLOHType(Kind) && "Invalid LOH directive type!");
    }

    MCLOHType getKind() const { return Kind; }
    LOHArgs getArgs() const { return Args; }
  };

  using MILOHArgs = MILOHDirective::LOHArgs;
  using MILOHContainer = SmallVector<MILOHDirective, 32>;

  const MILOHContainer &getLOHContainer() const { return LOHContainerSet; }

  /// Add a LOH directive of this @p Kind and these @p Args.
  void addLOHDirective(MCLOHType Kind, MILOHArgs Args) {
    LOHContainerSet.push_back(MILOHDirective(Kind, Args));
    LOHRelated.insert(Args.begin(), Args.end());
  }

  SmallVectorImpl<ForwardedRegister> &getForwardedMustTailRegParms() {
    return ForwardedMustTailRegParms;
  }

  Optional<int> getTaggedBasePointerIndex() const {
    return TaggedBasePointerIndex;
  }
  void setTaggedBasePointerIndex(int Index) { TaggedBasePointerIndex = Index; }

  unsigned getTaggedBasePointerOffset() const {
    return TaggedBasePointerOffset;
  }
  void setTaggedBasePointerOffset(unsigned Offset) {
    TaggedBasePointerOffset = Offset;
  }

  int getCalleeSaveBaseToFrameRecordOffset() const {
    return CalleeSaveBaseToFrameRecordOffset;
  }
  void setCalleeSaveBaseToFrameRecordOffset(int Offset) {
    CalleeSaveBaseToFrameRecordOffset = Offset;
  }

  bool shouldSignReturnAddress() const;
  bool shouldSignReturnAddress(bool SpillsLR) const;

  bool shouldSignWithBKey() const { return SignWithBKey; }
  bool isMTETagged() const { return IsMTETagged; }

  bool branchTargetEnforcement() const { return BranchTargetEnforcement; }

  void setHasSwiftAsyncContext(bool HasContext) {
    HasSwiftAsyncContext = HasContext;
  }
  bool hasSwiftAsyncContext() const { return HasSwiftAsyncContext; }

  void setSwiftAsyncContextFrameIdx(int FI) {
    SwiftAsyncContextFrameIdx = FI;
  }
  int getSwiftAsyncContextFrameIdx() const { return SwiftAsyncContextFrameIdx; }

  bool needsDwarfUnwindInfo() const;
  bool needsAsyncDwarfUnwindInfo() const;
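
  // Note (assumption): the two queries above lazily determine whether this
  // function requires DWARF unwind information, and whether it additionally
  // requires asynchronous (instruction-precise) unwind information, caching
  // the answers in NeedsDwarfUnwindInfo and NeedsAsyncDwarfUnwindInfo.
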
private:
  // Hold the lists of LOHs.
  MILOHContainer LOHContainerSet;
  SetOfInstructions LOHRelated;

  SmallVector<std::pair<unsigned, MCSymbol *>, 2> JumpTableEntryInfo;
};

namespace yaml {
struct AArch64FunctionInfo final : public yaml::MachineFunctionInfo {
  Optional<bool> HasRedZone;

  AArch64FunctionInfo() = default;
  AArch64FunctionInfo(const llvm::AArch64FunctionInfo &MFI);

  void mappingImpl(yaml::IO &YamlIO) override;
  ~AArch64FunctionInfo() = default;
};

template <> struct MappingTraits<AArch64FunctionInfo> {
  static void mapping(IO &YamlIO, AArch64FunctionInfo &MFI) {
    YamlIO.mapOptional("hasRedZone", MFI.HasRedZone);
  }
};

} // end namespace yaml

} // end namespace llvm

#endif // LLVM_LIB_TARGET_AARCH64_AARCH64MACHINEFUNCTIONINFO_H