#include "AArch64MachineFunctionInfo.h"
#include "AArch64TargetMachine.h"
#include "MCTargetDesc/AArch64AddressingModes.h"
#include "llvm/ADT/APSInt.h"
#include "llvm/CodeGen/ISDOpcodes.h"
#include "llvm/CodeGen/SelectionDAGISel.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/IntrinsicsAArch64.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/KnownBits.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;
#define DEBUG_TYPE …
#define PASS_NAME …
namespace {
class AArch64DAGToDAGISel : public SelectionDAGISel { … };
class AArch64DAGToDAGISelLegacy : public SelectionDAGISelLegacy { … };
}
char AArch64DAGToDAGISelLegacy::ID = …;
INITIALIZE_PASS(…)
static bool isIntImmediate(const SDNode *N, uint64_t &Imm) { … }
static bool isIntImmediate(SDValue N, uint64_t &Imm) { … }
static bool isOpcWithIntImmediate(const SDNode *N, unsigned Opc,
uint64_t &Imm) { … }
#ifndef NDEBUG
/// Return true iff \p N is a constant integer node whose value is exactly
/// \p ImmExpected. Only compiled into assertion-enabled builds.
static bool isIntImmediateEq(SDValue N, const uint64_t ImmExpected) {
  uint64_t ActualImm;
  // Short-circuit: if N is not an integer immediate, ActualImm stays unread.
  return isIntImmediate(N.getNode(), ActualImm) && ActualImm == ImmExpected;
}
#endif
bool AArch64DAGToDAGISel::SelectInlineAsmMemoryOperand(
const SDValue &Op, const InlineAsm::ConstraintCode ConstraintID,
std::vector<SDValue> &OutOps) { … }
bool AArch64DAGToDAGISel::SelectArithImmed(SDValue N, SDValue &Val,
SDValue &Shift) { … }
bool AArch64DAGToDAGISel::SelectNegArithImmed(SDValue N, SDValue &Val,
SDValue &Shift) { … }
static AArch64_AM::ShiftExtendType getShiftTypeForNode(SDValue N) { … }
static bool isWorthFoldingSHL(SDValue V) { … }
bool AArch64DAGToDAGISel::isWorthFoldingAddr(SDValue V, unsigned Size) const { … }
bool AArch64DAGToDAGISel::SelectShiftedRegisterFromAnd(SDValue N, SDValue &Reg,
SDValue &Shift) { … }
static AArch64_AM::ShiftExtendType
getExtendTypeForNode(SDValue N, bool IsLoadStore = false) { … }
bool AArch64DAGToDAGISel::isWorthFoldingALU(SDValue V, bool LSL) const { … }
bool AArch64DAGToDAGISel::SelectShiftedRegister(SDValue N, bool AllowROR,
SDValue &Reg, SDValue &Shift) { … }
static SDValue narrowIfNeeded(SelectionDAG *CurDAG, SDValue N) { … }
template<signed Low, signed High, signed Scale>
bool AArch64DAGToDAGISel::SelectRDVLImm(SDValue N, SDValue &Imm) { … }
bool AArch64DAGToDAGISel::SelectArithExtendedRegister(SDValue N, SDValue &Reg,
SDValue &Shift) { … }
bool AArch64DAGToDAGISel::SelectArithUXTXRegister(SDValue N, SDValue &Reg,
SDValue &Shift) { … }
static bool isWorthFoldingADDlow(SDValue N) { … }
static bool isValidAsScaledImmediate(int64_t Offset, unsigned Range,
unsigned Size) { … }
bool AArch64DAGToDAGISel::SelectAddrModeIndexedBitWidth(SDValue N, bool IsSignedImm,
unsigned BW, unsigned Size,
SDValue &Base,
SDValue &OffImm) { … }
bool AArch64DAGToDAGISel::SelectAddrModeIndexed(SDValue N, unsigned Size,
SDValue &Base, SDValue &OffImm) { … }
bool AArch64DAGToDAGISel::SelectAddrModeUnscaled(SDValue N, unsigned Size,
SDValue &Base,
SDValue &OffImm) { … }
static SDValue Widen(SelectionDAG *CurDAG, SDValue N) { … }
bool AArch64DAGToDAGISel::SelectExtendedSHL(SDValue N, unsigned Size,
bool WantExtend, SDValue &Offset,
SDValue &SignExtend) { … }
bool AArch64DAGToDAGISel::SelectAddrModeWRO(SDValue N, unsigned Size,
SDValue &Base, SDValue &Offset,
SDValue &SignExtend,
SDValue &DoShift) { … }
static bool isPreferredADD(int64_t ImmOff) { … }
bool AArch64DAGToDAGISel::SelectAddrModeXRO(SDValue N, unsigned Size,
SDValue &Base, SDValue &Offset,
SDValue &SignExtend,
SDValue &DoShift) { … }
SDValue AArch64DAGToDAGISel::createDTuple(ArrayRef<SDValue> Regs) { … }
SDValue AArch64DAGToDAGISel::createQTuple(ArrayRef<SDValue> Regs) { … }
SDValue AArch64DAGToDAGISel::createZTuple(ArrayRef<SDValue> Regs) { … }
SDValue AArch64DAGToDAGISel::createZMulTuple(ArrayRef<SDValue> Regs) { … }
SDValue AArch64DAGToDAGISel::createTuple(ArrayRef<SDValue> Regs,
const unsigned RegClassIDs[],
const unsigned SubRegs[]) { … }
void AArch64DAGToDAGISel::SelectTable(SDNode *N, unsigned NumVecs, unsigned Opc,
bool isExt) { … }
static std::tuple<SDValue, SDValue>
extractPtrauthBlendDiscriminators(SDValue Disc, SelectionDAG *DAG) { … }
void AArch64DAGToDAGISel::SelectPtrauthAuth(SDNode *N) { … }
void AArch64DAGToDAGISel::SelectPtrauthResign(SDNode *N) { … }
bool AArch64DAGToDAGISel::tryIndexedLoad(SDNode *N) { … }
void AArch64DAGToDAGISel::SelectLoad(SDNode *N, unsigned NumVecs, unsigned Opc,
unsigned SubRegIdx) { … }
void AArch64DAGToDAGISel::SelectPostLoad(SDNode *N, unsigned NumVecs,
unsigned Opc, unsigned SubRegIdx) { … }
std::tuple<unsigned, SDValue, SDValue>
AArch64DAGToDAGISel::findAddrModeSVELoadStore(SDNode *N, unsigned Opc_rr,
unsigned Opc_ri,
const SDValue &OldBase,
const SDValue &OldOffset,
unsigned Scale) { … }
enum class SelectTypeKind { … };
template <SelectTypeKind Kind>
static unsigned SelectOpcodeFromVT(EVT VT, ArrayRef<unsigned> Opcodes) { … }
void AArch64DAGToDAGISel::SelectPExtPair(SDNode *N, unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectWhilePair(SDNode *N, unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectCVTIntrinsic(SDNode *N, unsigned NumVecs,
unsigned Opcode) { … }
void AArch64DAGToDAGISel::SelectDestructiveMultiIntrinsic(SDNode *N,
unsigned NumVecs,
bool IsZmMulti,
unsigned Opcode,
bool HasPred) { … }
void AArch64DAGToDAGISel::SelectPredicatedLoad(SDNode *N, unsigned NumVecs,
unsigned Scale, unsigned Opc_ri,
unsigned Opc_rr, bool IsIntr) { … }
void AArch64DAGToDAGISel::SelectContiguousMultiVectorLoad(SDNode *N,
unsigned NumVecs,
unsigned Scale,
unsigned Opc_ri,
unsigned Opc_rr) { … }
void AArch64DAGToDAGISel::SelectFrintFromVT(SDNode *N, unsigned NumVecs,
unsigned Opcode) { … }
void AArch64DAGToDAGISel::SelectMultiVectorLutiLane(SDNode *Node,
unsigned NumOutVecs,
unsigned Opc,
uint32_t MaxImm) { … }
void AArch64DAGToDAGISel::SelectMultiVectorLuti(SDNode *Node,
unsigned NumOutVecs,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectClamp(SDNode *N, unsigned NumVecs,
unsigned Op) { … }
bool SelectSMETile(unsigned &BaseReg, unsigned TileNum) { … }
template <unsigned MaxIdx, unsigned Scale>
void AArch64DAGToDAGISel::SelectMultiVectorMove(SDNode *N, unsigned NumVecs,
unsigned BaseReg, unsigned Op) { … }
void AArch64DAGToDAGISel::SelectMultiVectorMoveZ(SDNode *N, unsigned NumVecs,
unsigned Op, unsigned MaxIdx,
unsigned Scale, unsigned BaseReg) { … }
void AArch64DAGToDAGISel::SelectUnaryMultiIntrinsic(SDNode *N,
unsigned NumOutVecs,
bool IsTupleInput,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectStore(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectPredicatedStore(SDNode *N, unsigned NumVecs,
unsigned Scale, unsigned Opc_rr,
unsigned Opc_ri) { … }
bool AArch64DAGToDAGISel::SelectAddrModeFrameIndexSVE(SDValue N, SDValue &Base,
SDValue &OffImm) { … }
void AArch64DAGToDAGISel::SelectPostStore(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
namespace {
class WidenVector { … };
}
static SDValue NarrowVector(SDValue V128Reg, SelectionDAG &DAG) { … }
void AArch64DAGToDAGISel::SelectLoadLane(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectPostLoadLane(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectStoreLane(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
void AArch64DAGToDAGISel::SelectPostStoreLane(SDNode *N, unsigned NumVecs,
unsigned Opc) { … }
static bool isBitfieldExtractOpFromAnd(SelectionDAG *CurDAG, SDNode *N,
unsigned &Opc, SDValue &Opd0,
unsigned &LSB, unsigned &MSB,
unsigned NumberOfIgnoredLowBits,
bool BiggerPattern) { … }
static bool isBitfieldExtractOpFromSExtInReg(SDNode *N, unsigned &Opc,
SDValue &Opd0, unsigned &Immr,
unsigned &Imms) { … }
static bool isSeveralBitsExtractOpFromShr(SDNode *N, unsigned &Opc,
SDValue &Opd0, unsigned &LSB,
unsigned &MSB) { … }
static bool isBitfieldExtractOpFromShr(SDNode *N, unsigned &Opc, SDValue &Opd0,
unsigned &Immr, unsigned &Imms,
bool BiggerPattern) { … }
bool AArch64DAGToDAGISel::tryBitfieldExtractOpFromSExt(SDNode *N) { … }
static bool isBitfieldExtractOp(SelectionDAG *CurDAG, SDNode *N, unsigned &Opc,
SDValue &Opd0, unsigned &Immr, unsigned &Imms,
unsigned NumberOfIgnoredLowBits = 0,
bool BiggerPattern = false) { … }
bool AArch64DAGToDAGISel::tryBitfieldExtractOp(SDNode *N) { … }
static bool isBitfieldDstMask(uint64_t DstMask, const APInt &BitsToBeInserted,
unsigned NumberOfIgnoredHighBits, EVT VT) { … }
static void getUsefulBits(SDValue Op, APInt &UsefulBits, unsigned Depth = 0);
static void getUsefulBitsFromAndWithImmediate(SDValue Op, APInt &UsefulBits,
unsigned Depth) { … }
static void getUsefulBitsFromBitfieldMoveOpd(SDValue Op, APInt &UsefulBits,
uint64_t Imm, uint64_t MSB,
unsigned Depth) { … }
static void getUsefulBitsFromUBFM(SDValue Op, APInt &UsefulBits,
unsigned Depth) { … }
static void getUsefulBitsFromOrWithShiftedReg(SDValue Op, APInt &UsefulBits,
unsigned Depth) { … }
static void getUsefulBitsFromBFM(SDValue Op, SDValue Orig, APInt &UsefulBits,
unsigned Depth) { … }
static void getUsefulBitsForUse(SDNode *UserNode, APInt &UsefulBits,
SDValue Orig, unsigned Depth) { … }
static void getUsefulBits(SDValue Op, APInt &UsefulBits, unsigned Depth) { … }
static SDValue getLeftShift(SelectionDAG *CurDAG, SDValue Op, int ShlAmount) { … }
static bool isBitfieldPositioningOpFromAnd(SelectionDAG *CurDAG, SDValue Op,
bool BiggerPattern,
const uint64_t NonZeroBits,
SDValue &Src, int &DstLSB,
int &Width);
static bool isBitfieldPositioningOpFromShl(SelectionDAG *CurDAG, SDValue Op,
bool BiggerPattern,
const uint64_t NonZeroBits,
SDValue &Src, int &DstLSB,
int &Width);
static bool isBitfieldPositioningOp(SelectionDAG *CurDAG, SDValue Op,
bool BiggerPattern, SDValue &Src,
int &DstLSB, int &Width) { … }
static bool isBitfieldPositioningOpFromAnd(SelectionDAG *CurDAG, SDValue Op,
bool BiggerPattern,
const uint64_t NonZeroBits,
SDValue &Src, int &DstLSB,
int &Width) { … }
static bool isSeveralBitsPositioningOpFromShl(const uint64_t ShlImm, SDValue Op,
SDValue &Src, int &DstLSB,
int &Width) { … }
static bool isBitfieldPositioningOpFromShl(SelectionDAG *CurDAG, SDValue Op,
bool BiggerPattern,
const uint64_t NonZeroBits,
SDValue &Src, int &DstLSB,
int &Width) { … }
static bool isShiftedMask(uint64_t Mask, EVT VT) { … }
static bool tryBitfieldInsertOpFromOrAndImm(SDNode *N, SelectionDAG *CurDAG) { … }
static bool isWorthFoldingIntoOrrWithShift(SDValue Dst, SelectionDAG *CurDAG,
SDValue &ShiftedOperand,
uint64_t &EncodedShiftImm) { … }
static bool tryOrrWithShift(SDNode *N, SDValue OrOpd0, SDValue OrOpd1,
SDValue Src, SDValue Dst, SelectionDAG *CurDAG,
const bool BiggerPattern) { … }
static bool tryBitfieldInsertOpFromOr(SDNode *N, const APInt &UsefulBits,
SelectionDAG *CurDAG) { … }
bool AArch64DAGToDAGISel::tryBitfieldInsertOp(SDNode *N) { … }
bool AArch64DAGToDAGISel::tryBitfieldInsertInZeroOp(SDNode *N) { … }
bool AArch64DAGToDAGISel::tryShiftAmountMod(SDNode *N) { … }
static bool checkCVTFixedPointOperandWithFBits(SelectionDAG *CurDAG, SDValue N,
SDValue &FixedPos,
unsigned RegWidth,
bool isReciprocal) { … }
bool AArch64DAGToDAGISel::SelectCVTFixedPosOperand(SDValue N, SDValue &FixedPos,
unsigned RegWidth) { … }
bool AArch64DAGToDAGISel::SelectCVTFixedPosRecipOperand(SDValue N,
SDValue &FixedPos,
unsigned RegWidth) { … }
static int getIntOperandFromRegisterString(StringRef RegString) { … }
bool AArch64DAGToDAGISel::tryReadRegister(SDNode *N) { … }
bool AArch64DAGToDAGISel::tryWriteRegister(SDNode *N) { … }
bool AArch64DAGToDAGISel::SelectCMP_SWAP(SDNode *N) { … }
bool AArch64DAGToDAGISel::SelectSVEAddSubImm(SDValue N, MVT VT, SDValue &Imm,
SDValue &Shift) { … }
bool AArch64DAGToDAGISel::SelectSVEAddSubSSatImm(SDValue N, MVT VT,
SDValue &Imm, SDValue &Shift,
bool Negate) { … }
bool AArch64DAGToDAGISel::SelectSVECpyDupImm(SDValue N, MVT VT, SDValue &Imm,
SDValue &Shift) { … }
bool AArch64DAGToDAGISel::SelectSVESignedArithImm(SDValue N, SDValue &Imm) { … }
bool AArch64DAGToDAGISel::SelectSVEArithImm(SDValue N, MVT VT, SDValue &Imm) { … }
bool AArch64DAGToDAGISel::SelectSVELogicalImm(SDValue N, MVT VT, SDValue &Imm,
bool Invert) { … }
bool AArch64DAGToDAGISel::SelectSVEShiftImm(SDValue N, uint64_t Low,
uint64_t High, bool AllowSaturation,
SDValue &Imm) { … }
bool AArch64DAGToDAGISel::trySelectStackSlotTagP(SDNode *N) { … }
void AArch64DAGToDAGISel::SelectTagP(SDNode *N) { … }
bool AArch64DAGToDAGISel::trySelectCastFixedLengthToScalableVector(SDNode *N) { … }
bool AArch64DAGToDAGISel::trySelectCastScalableToFixedLengthVector(SDNode *N) { … }
bool AArch64DAGToDAGISel::trySelectXAR(SDNode *N) { … }
void AArch64DAGToDAGISel::Select(SDNode *Node) { … }
FunctionPass *llvm::createAArch64ISelDag(AArch64TargetMachine &TM,
CodeGenOptLevel OptLevel) { … }
static EVT getPackedVectorTypeFromPredicateType(LLVMContext &Ctx, EVT PredVT,
unsigned NumVec) { … }
static EVT getMemVTFromNode(LLVMContext &Ctx, SDNode *Root) { … }
template <int64_t Min, int64_t Max>
bool AArch64DAGToDAGISel::SelectAddrModeIndexedSVE(SDNode *Root, SDValue N,
SDValue &Base,
SDValue &OffImm) { … }
bool AArch64DAGToDAGISel::SelectSVERegRegAddrMode(SDValue N, unsigned Scale,
SDValue &Base,
SDValue &Offset) { … }
bool AArch64DAGToDAGISel::SelectAllActivePredicate(SDValue N) { … }
bool AArch64DAGToDAGISel::SelectAnyPredicate(SDValue N) { … }
bool AArch64DAGToDAGISel::SelectSMETileSlice(SDValue N, unsigned MaxSize,
SDValue &Base, SDValue &Offset,
unsigned Scale) { … }