/freebsd/contrib/llvm-project/llvm/lib/Target/PowerPC/MCTargetDesc/
  PPCMCTargetDesc.h
    104: if (isShiftedMask_64(Val)) {
    112: if (isShiftedMask_64(Val)) {
/freebsd/contrib/llvm-project/llvm/include/llvm/Support/
  MathExtras.h
    285: constexpr bool isShiftedMask_64(uint64_t Value) {  [in isShiftedMask_64() function]
    318: inline bool isShiftedMask_64(uint64_t Value, unsigned &MaskIdx,  [in isShiftedMask_64() function]
    320: if (!isShiftedMask_64(Value))  [in isShiftedMask_64()]
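The two overloads matched in MathExtras.h are the declarations every other hit in this listing calls into. As a reading aid, here is a small standalone sketch of their documented semantics (local helper names, not the LLVM implementation itself; needs C++20 for <bit>):

```cpp
#include <bit>       // std::countr_zero, std::popcount (C++20)
#include <cassert>
#include <cstdint>

// Sketch of the semantics of llvm::isShiftedMask_64: true iff Value is
// non-zero and its set bits form exactly one contiguous run.
constexpr bool isShiftedMask64(uint64_t Value) {
  // Adding the lowest set bit to a single run of ones clears the whole run.
  return Value != 0 && ((Value + (Value & (~Value + 1))) & Value) == 0;
}

// Sketch of the three-argument overload: also report where the run starts
// (MaskIdx, counted from bit 0) and how many bits it spans (MaskLen).
inline bool isShiftedMask64(uint64_t Value, unsigned &MaskIdx, unsigned &MaskLen) {
  if (!isShiftedMask64(Value))
    return false;
  MaskIdx = std::countr_zero(Value);
  MaskLen = std::popcount(Value);
  return true;
}

int main() {
  assert(isShiftedMask64(0x0000000000ff0000ull));   // ones at bits 16..23
  assert(!isShiftedMask64(0x0000000000ff00ffull));  // two runs -> false
  unsigned Idx = 0, Len = 0;
  assert(isShiftedMask64(0x0000000000ff0000ull, Idx, Len) && Idx == 16 && Len == 8);
}
```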
/freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/
  AArch64AddressingModes.h
    238: if (isShiftedMask_64(Imm)) {  [in processLogicalImmediate()]
    244: if (!isShiftedMask_64(~Imm))  [in processLogicalImmediate()]
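The AArch64AddressingModes.h hit tests both Imm and ~Imm. A minimal sketch of why (illustration only, not the actual processLogicalImmediate encoder; classify and isShiftedMask64 are local helpers): a run of ones that wraps around the 64-bit word is not itself a shifted mask, but its complement is.

```cpp
#include <cstdint>
#include <cstdio>

// Local stand-in for llvm::isShiftedMask_64 (one contiguous run of ones).
static bool isShiftedMask64(uint64_t V) {
  return V != 0 && ((V + (V & (~V + 1))) & V) == 0;
}

// Hypothetical classifier showing the two shapes distinguished above.
static const char *classify(uint64_t Imm) {
  if (isShiftedMask64(Imm))
    return "contiguous run of ones";
  if (isShiftedMask64(~Imm))
    return "run of ones that wraps around the word (contiguous run of zeros)";
  return "neither shape";
}

int main() {
  std::printf("%s\n", classify(0x00000000ffff0000ull)); // ones in the middle
  std::printf("%s\n", classify(0xff000000000000ffull)); // ones wrap bit 63 -> bit 0
  std::printf("%s\n", classify(0x0f0f0f0f0f0f0f0full)); // repeated pattern
}
```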
/freebsd/contrib/llvm-project/llvm/lib/Target/VE/
  VE.h
    341: return (Val & (UINT64_C(1) << 63)) && isShiftedMask_64(Val);  [in isMImmVal()]
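Read directly off the VE.h hit: the predicate accepts values whose set bits form a single run that includes bit 63, i.e. a block of leading ones followed only by zeros. A small sketch of just that check (helper names are illustrative, not VE's API):

```cpp
#include <cassert>
#include <cstdint>

// Local stand-in for llvm::isShiftedMask_64.
static bool isShiftedMask64(uint64_t V) {
  return V != 0 && ((V + (V & (~V + 1))) & V) == 0;
}

// Same shape as the matched line: top bit set plus a single contiguous run,
// i.e. N leading ones followed only by zeros.
static bool isLeadingOnesMask(uint64_t Val) {
  return (Val & (UINT64_C(1) << 63)) && isShiftedMask64(Val);
}

int main() {
  assert(isLeadingOnesMask(0xffff000000000000ull));  // 16 leading ones
  assert(!isLeadingOnesMask(0x0000ffff00000000ull)); // run does not reach bit 63
  assert(!isLeadingOnesMask(0xff000000000000ffull)); // two runs
}
```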
/freebsd/contrib/llvm-project/llvm/lib/Target/RISCV/
  RISCVISelDAGToDAG.cpp
    1085: if (ShAmt <= 32 && isShiftedMask_64(Mask)) {  [in Select()]
    1114: if (isShiftedMask_64(Mask) && N0.hasOneUse()) {  [in Select()]
    1332: if (LeftShift && isShiftedMask_64(C1)) {  [in Select()]
    1362: if (!LeftShift && isShiftedMask_64(C1)) {  [in Select()]
    1400: if (LeftShift && isShiftedMask_64(C1)) {  [in Select()]
    2952: if (isShiftedMask_64(Mask)) {  [in selectSHXADDOp()]
    2988: if (isShiftedMask_64(Mask)) {  [in selectSHXADDOp()]
    3042: if (isShiftedMask_64(Mask)) {  [in selectSHXADD_UWOp()]
  RISCVTargetTransformInfo.cpp
    152: if (isShiftedMask_64(Mask)) {  [in canUseShiftPair()]
  RISCVInstrInfo.td
    492: return !isInt<32>(Imm) && isUInt<32>(Imm) && isShiftedMask_64(Imm) &&
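The RISC-V hits (canUseShiftPair in particular) revolve around rewriting an AND with a shifted-mask constant into shift instructions. The identities below illustrate that idea and are not the backend code: when the run of ones touches either end of the 64-bit word, the AND is exactly a pair of shifts (slli+srli or srli+slli).

```cpp
#include <cassert>
#include <cstdint>

int main() {
  uint64_t x = 0x123456789abcdef0ull;
  unsigned k = 24;

  // Keep the low 64-k bits: left shift then logical right shift (slli+srli).
  assert((x & (UINT64_MAX >> k)) == ((x << k) >> k));

  // Keep the high 64-k bits: logical right shift then left shift (srli+slli).
  assert((x & (UINT64_MAX << k)) == ((x >> k) << k));
}
```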
/freebsd/contrib/llvm-project/llvm/lib/Target/PowerPC/
  PPCTargetTransformInfo.cpp
    301: (isShiftedMask_64(Imm.getZExtValue()) ||  [in getIntImmCostInst()]
    302: isShiftedMask_64(~Imm.getZExtValue())))  [in getIntImmCostInst()]
/freebsd/contrib/llvm-project/llvm/lib/Target/LoongArch/
  LoongArchISelLowering.cpp
    3122: !isShiftedMask_64(CN->getZExtValue(), SMIdx, SMLen))  [in performANDCombine()]
    3213: !isShiftedMask_64(CN->getZExtValue(), MaskIdx, MaskLen))  [in performSRLCombine()]
    3257: isShiftedMask_64(~CN0->getSExtValue(), MaskIdx0, MaskLen0) &&  [in performORCombine()]
    3260: isShiftedMask_64(CN1->getZExtValue(), MaskIdx1, MaskLen1) &&  [in performORCombine()]
    3279: isShiftedMask_64(~CN0->getSExtValue(), MaskIdx0, MaskLen0) &&  [in performORCombine()]
    3284: isShiftedMask_64(CN1->getZExtValue(), MaskIdx1, MaskLen1) &&  [in performORCombine()]
    3302: isShiftedMask_64(~CN0->getSExtValue(), MaskIdx0, MaskLen0) &&  [in performORCombine()]
    3325: isShiftedMask_64(CN0->getZExtValue(), MaskIdx0, MaskLen0) &&  [in performORCombine()]
    3344: isShiftedMask_64(~CN0->getSExtValue(), MaskIdx0, MaskLen0) &&  [in performORCombine()]
    3370: isShiftedMask_64(CNMask->getZExtValue(), MaskIdx, MaskLen) &&  [in performORCombine()]
    [all …]
  LoongArchInstrInfo.td
    597: : llvm::isShiftedMask_64(~Imm, MaskIdx, MaskLen);
    605: : llvm::isShiftedMask_64(~Imm, MaskIdx, MaskLen);
    615: : llvm::isShiftedMask_64(~Imm, MaskIdx, MaskLen);
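The LoongArch combines use the MaskIdx/MaskLen overload to recognize bitfield extract and insert shapes (the target has bstrpick/bstrins-style instructions for these). Below is a generic sketch of the two identities involved, assuming Mask has already passed isShiftedMask_64; it is not the backend code and needs C++20 for <bit>.

```cpp
#include <bit>
#include <cassert>
#include <cstdint>

int main() {
  const uint64_t Mask = 0x00000ffff0000000ull;      // one contiguous run of ones
  const unsigned MaskIdx = std::countr_zero(Mask);  // 28: first set bit
  const unsigned MaskLen = std::popcount(Mask);     // 16: length of the run

  uint64_t x = 0x123456789abcdef0ull;
  uint64_t y = 0x000000000000beefull;

  // Bitfield extract: AND with the mask, then shift down by MaskIdx.
  uint64_t Extracted = (x & Mask) >> MaskIdx;
  assert(Extracted == ((x >> MaskIdx) & ((UINT64_C(1) << MaskLen) - 1)));

  // Bitfield insert: keep x outside the mask, drop the low MaskLen bits of y in.
  uint64_t Inserted = (x & ~Mask) | ((y << MaskIdx) & Mask);
  assert(((Inserted & Mask) >> MaskIdx) == (y & ((UINT64_C(1) << MaskLen) - 1)));
  assert((Inserted & ~Mask) == (x & ~Mask));
}
```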
/freebsd/contrib/llvm-project/llvm/include/llvm/ADT/
  APInt.h
    492: return isShiftedMask_64(U.VAL);  [in isShiftedMask()]
    504: return isShiftedMask_64(U.VAL, MaskIdx, MaskLen);  [in isShiftedMask()]
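The APInt.h hits show that for values stored inline (bit widths of 64 or less) APInt::isShiftedMask simply defers to isShiftedMask_64. A small usage sketch, assuming an LLVM development tree on the include and link path:

```cpp
#include "llvm/ADT/APInt.h"
#include <cassert>

int main() {
  llvm::APInt A(32, 0x0ff0);   // ones at bits 4..11
  assert(A.isShiftedMask());

  unsigned Idx = 0, Len = 0;
  assert(A.isShiftedMask(Idx, Len) && Idx == 4 && Len == 8);

  llvm::APInt B(32, 0x0f0f);   // two runs -> not a shifted mask
  assert(!B.isShiftedMask());
}
```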
/freebsd/contrib/llvm-project/llvm/lib/Target/SystemZ/
  SystemZInstrInfo.cpp
    1981: if (isShiftedMask_64(Mask, LSB, Length)) {  [in isRxSBGMask()]
    1989: if (isShiftedMask_64(Mask ^ allOnes(BitSize), LSB, Length)) {  [in isRxSBGMask()]
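The SystemZ hit takes the complement as Mask ^ allOnes(BitSize) rather than as ~Mask. A sketch of why the width matters (local helpers, not the SystemZ code): flipping only the low BitSize bits keeps a narrow mask's zero-run recognizable, while a full 64-bit complement drags in the high bits.

```cpp
#include <cassert>
#include <cstdint>

// Local stand-in for llvm::isShiftedMask_64.
static bool isShiftedMask64(uint64_t V) {
  return V != 0 && ((V + (V & (~V + 1))) & V) == 0;
}

// All-ones value of the given width (0 gives 0, 64 gives UINT64_MAX).
static uint64_t allOnes(unsigned Count) {
  return Count == 0 ? 0 : UINT64_MAX >> (64 - Count);
}

int main() {
  const unsigned BitSize = 32;
  const uint64_t Mask = 0xFFFF00FFull;  // 32-bit mask whose zero bits are 8..15

  assert(!isShiftedMask64(~Mask));                  // 64-bit complement: two runs
  assert(isShiftedMask64(Mask ^ allOnes(BitSize))); // width-limited complement: one run
}
```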
/freebsd/contrib/llvm-project/llvm/lib/Target/Mips/
  MipsISelLowering.cpp
    813: !isShiftedMask_64(CN->getZExtValue(), SMPos, SMSize))  [in performANDCombine()]
    895: !isShiftedMask_64(~CN->getSExtValue(), SMPos0, SMSize0))  [in performORCombine()]
    903: !isShiftedMask_64(CN->getZExtValue(), SMPos1, SMSize1))  [in performORCombine()]
    1152: !isShiftedMask_64(CN->getZExtValue(), SMPos, SMSize))  [in performSHLCombine()]
/freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/
  AArch64ISelDAGToDAG.cpp
    3061: if (!isShiftedMask_64(NonZeroBits))  [in isBitfieldPositioningOp()]
    3083: assert(isShiftedMask_64(NonZeroBits) && "Caller guaranteed");  [in isBitfieldPositioningOpFromAnd()]
    3205: assert(isShiftedMask_64(NonZeroBits) && "Caller guaranteed");  [in isBitfieldPositioningOpFromShl()]
    3236: return isShiftedMask_64(Mask);  [in isShiftedMask()]
    3344: isShiftedMask_64(AndImm)) {  [in isWorthFoldingIntoOrrWithShift()]
  AArch64ISelLowering.cpp
    2207: if (isShiftedMask_64(NewImm) || isShiftedMask_64(~(NewImm | ~Mask)))  [in optimizeLogicalImm()]
/freebsd/contrib/llvm-project/llvm/lib/Target/X86/
  X86ISelDAGToDAG.cpp
    2190: if (!isShiftedMask_64(Mask, MaskIdx, MaskLen))  [in foldMaskAndShiftToScale()]
    2289: if (!isShiftedMask_64(Mask, MaskIdx, MaskLen))  [in foldMaskedShiftToBEXTR()]
    6034: if (CmpVT == MVT::i64 && !isInt<8>(Mask) && isShiftedMask_64(Mask) &&  [in Select()]
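foldMaskedShiftToBEXTR points at x86's BEXTR instruction, which extracts a bit field given a start and a length. The sketch below pairs a simple software model of such an extract with the shifted-mask identity that makes the fold possible; it is an illustration, not the DAG combine, and needs C++20 for <bit>.

```cpp
#include <bit>
#include <cassert>
#include <cstdint>

// Simple software model of a 64-bit bit-field extract (BEXTR-style: return
// Len bits of Src starting at bit Start). Model only, not the ISA definition.
static uint64_t bextr64(uint64_t Src, unsigned Start, unsigned Len) {
  if (Len == 0 || Start >= 64)
    return 0;
  uint64_t Shifted = Src >> Start;
  return Len >= 64 ? Shifted : Shifted & ((UINT64_C(1) << Len) - 1);
}

int main() {
  const uint64_t Mask = 0x00fff00000000000ull;  // contiguous run: bits 44..55
  const unsigned Idx = std::countr_zero(Mask);  // 44
  const unsigned Len = std::popcount(Mask);     // 12

  uint64_t x = 0x9e3779b97f4a7c15ull;
  // Masking with a shifted mask and shifting down is one bit-field extract.
  assert(((x & Mask) >> Idx) == bextr64(x, Idx, Len));
}
```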
/freebsd/contrib/llvm-project/llvm/lib/Target/NVPTX/
  NVPTXISelDAGToDAG.cpp
    3574: } else if (isShiftedMask_64(MaskVal)) {  [in tryBFE()]
/freebsd/contrib/llvm-project/llvm/lib/Target/AArch64/GISel/
  AArch64InstructionSelector.cpp
    3133: if (!MaskVal || !isShiftedMask_64(*MaskVal))  [in select()]
/freebsd/contrib/llvm-project/llvm/lib/Target/AMDGPU/
  SIISelLowering.cpp
    11530: (Bits == 8 || Bits == 16) && isShiftedMask_64(Mask) && !(Mask & 1)) {  [in performAndCombine()]