#ifdef GET_GLOBALISEL_PREDICATE_BITSET
// Reconstructed: this section was garbled ("= …;" and a bare "PredicateBitset;").
// The SubtargetFeatureBits enum below assigns bit positions 0..114 (highest is
// Feature_HasLdsAtomicAddF64Bit = 114), i.e. 115 predicate bits in total, so the
// bitset must be sized for 115 predicates.
const unsigned MAX_SUBTARGET_PREDICATES = 115;
using PredicateBitset = llvm::Bitset<MAX_SUBTARGET_PREDICATES>;
#endif
#ifdef GET_GLOBALISEL_TEMPORARIES_DECL
// Class-member declaration fragment, spliced into AMDGPUInstructionSelector.
// Mutable scratch state used while executing the generated match table.
mutable MatcherState State;
// Member-function-pointer type for complex operand matchers; entries of
// ComplexPredicateFns (declared below) have this signature.
typedef ComplexRendererFns(AMDGPUInstructionSelector::*ComplexMatcherMemFn)(MachineOperand &) const;
// Member-function-pointer type for custom operand renderers; entries of
// CustomRenderers (declared below) have this signature.
typedef void(AMDGPUInstructionSelector::*CustomRendererFn)(MachineInstrBuilder &, const MachineInstr &, int) const;
// Bundles the type table, feature bitsets and the two function-pointer tables
// for consumption by the match-table executor.
const ExecInfoTy<PredicateBitset, ComplexMatcherMemFn, CustomRendererFn> ExecInfo;
static AMDGPUInstructionSelector::ComplexMatcherMemFn ComplexPredicateFns[];
static AMDGPUInstructionSelector::CustomRendererFn CustomRenderers[];
// Executor callbacks: evaluate target-specific predicates / actions by ID.
bool testImmPredicate_I64(unsigned PredicateID, int64_t Imm) const override;
bool testImmPredicate_APInt(unsigned PredicateID, const APInt &Imm) const override;
bool testImmPredicate_APFloat(unsigned PredicateID, const APFloat &Imm) const override;
const uint8_t *getMatchTable() const override;
bool testMIPredicate_MI(unsigned PredicateID, const MachineInstr &MI, const MatcherState &State) const override;
bool testSimplePredicate(unsigned PredicateID) const override;
bool runCustomAction(unsigned FnID, const MatcherState &State, NewMIVector &OutMIs) const override;
#endif
#ifdef GET_GLOBALISEL_TEMPORARIES_INIT
// Constructor mem-initializer fragment (spliced after an existing initializer,
// hence the leading comma). State(6) presumably sizes the matcher's internal
// storage — see MatcherState's constructor for the exact meaning of 6.
, State(6),
ExecInfo(TypeObjects, NumTypeObjects, FeatureBitsets, ComplexPredicateFns, CustomRenderers)
#endif
#ifdef GET_GLOBALISEL_IMPL
// Indices into the TypeObjects table below. Names encode the LLT:
// pNsM = pointer in address space N of M bits, sN = N-bit scalar,
// vMsN = vector of M elements of N bits. Enumerator order must match the
// initializer order of TypeObjects exactly.
enum {
GILLT_p0s64,
GILLT_p1s64,
GILLT_p2s32,
GILLT_p3s32,
GILLT_p4s64,
GILLT_p5s32,
GILLT_p6s32,
GILLT_s1,
GILLT_s16,
GILLT_s32,
GILLT_s64,
GILLT_v2s16,
GILLT_v2s32,
GILLT_v2s64,
GILLT_v3s32,
GILLT_v3s64,
GILLT_v4s16,
GILLT_v4s32,
GILLT_v4s64,
GILLT_v5s32,
GILLT_v6s32,
GILLT_v7s32,
GILLT_v8s16,
GILLT_v8s32,
GILLT_v8s64,
GILLT_v9s32,
GILLT_v10s32,
GILLT_v11s32,
GILLT_v12s32,
GILLT_v16s16,
GILLT_v16s32,
GILLT_v16s64,
GILLT_v32s16,
GILLT_v32s32,
};
// Number of entries in TypeObjects; must equal the number of GILLT_* values
// above (34).
const static size_t NumTypeObjects = 34;
// LLT table indexed by the GILLT_* enum; keep initializer order in sync with it.
const static LLT TypeObjects[] = {
LLT::pointer(0, 64),
LLT::pointer(1, 64),
LLT::pointer(2, 32),
LLT::pointer(3, 32),
LLT::pointer(4, 64),
LLT::pointer(5, 32),
LLT::pointer(6, 32),
LLT::scalar(1),
LLT::scalar(16),
LLT::scalar(32),
LLT::scalar(64),
LLT::vector(ElementCount::getFixed(2), 16),
LLT::vector(ElementCount::getFixed(2), 32),
LLT::vector(ElementCount::getFixed(2), 64),
LLT::vector(ElementCount::getFixed(3), 32),
LLT::vector(ElementCount::getFixed(3), 64),
LLT::vector(ElementCount::getFixed(4), 16),
LLT::vector(ElementCount::getFixed(4), 32),
LLT::vector(ElementCount::getFixed(4), 64),
LLT::vector(ElementCount::getFixed(5), 32),
LLT::vector(ElementCount::getFixed(6), 32),
LLT::vector(ElementCount::getFixed(7), 32),
LLT::vector(ElementCount::getFixed(8), 16),
LLT::vector(ElementCount::getFixed(8), 32),
LLT::vector(ElementCount::getFixed(8), 64),
LLT::vector(ElementCount::getFixed(9), 32),
LLT::vector(ElementCount::getFixed(10), 32),
LLT::vector(ElementCount::getFixed(11), 32),
LLT::vector(ElementCount::getFixed(12), 32),
LLT::vector(ElementCount::getFixed(16), 16),
LLT::vector(ElementCount::getFixed(16), 32),
LLT::vector(ElementCount::getFixed(16), 64),
LLT::vector(ElementCount::getFixed(32), 16),
LLT::vector(ElementCount::getFixed(32), 32),
};
// Bit positions within PredicateBitset for each subtarget/function predicate.
// Values are assigned by the generator and are not contiguous in declaration
// order; the largest is 114 (Feature_HasLdsAtomicAddF64Bit), so the bitset
// needs at least 115 bits. uint8_t underlying type is sufficient (max 114).
enum SubtargetFeatureBits : uint8_t {
Feature_FalsePredicateBit = 100,
Feature_isGFX6Bit = 41,
Feature_isGFX6GFX7Bit = 8,
Feature_isGFX6GFX7GFX10Bit = 98,
Feature_isGFX7OnlyBit = 71,
Feature_isGFX7GFX8GFX9Bit = 36,
Feature_isGFX6GFX7GFX8GFX9Bit = 56,
Feature_isGFX6GFX7GFX8GFX9GFX10Bit = 111,
Feature_isNotGFX12PlusBit = 19,
Feature_isGFX8GFX9GFX10GFX11Bit = 20,
Feature_isGFX7PlusBit = 13,
Feature_isGFX8PlusBit = 4,
Feature_isGFX8OnlyBit = 21,
Feature_isGFX9PlusBit = 0,
Feature_isNotGFX9PlusBit = 46,
Feature_isGFX9OnlyBit = 49,
Feature_isGFX90APlusBit = 32,
Feature_isNotGFX90APlusBit = 22,
Feature_isGFX908orGFX90ABit = 31,
Feature_isGFX940PlusBit = 33,
Feature_isGFX8GFX9Bit = 15,
Feature_isGFX10OnlyBit = 52,
Feature_isGFX10PlusBit = 5,
Feature_isGFX9GFX10Bit = 47,
Feature_isGFX11OnlyBit = 53,
Feature_isGFX11PlusBit = 1,
Feature_isGFX12OnlyBit = 54,
Feature_isGFX12PlusBit = 3,
Feature_HasFlatAddressSpaceBit = 73,
Feature_HasFlatBufferGlobalAtomicFaddF64InstBit = 89,
Feature_HasAtomicFMinFMaxF32GlobalInstsBit = 83,
Feature_HasAtomicFMinFMaxF64GlobalInstsBit = 88,
Feature_HasAtomicFMinFMaxF32FlatInstsBit = 79,
Feature_HasAtomicFMinFMaxF64FlatInstsBit = 80,
Feature_HasLdsAtomicAddF64Bit = 114,
Feature_HasFlatGlobalInstsBit = 74,
Feature_HasFlatScratchInstsBit = 77,
Feature_HasD16LoadStoreBit = 82,
Feature_HasFlatScratchSVSModeBit = 78,
Feature_HasGFX10_BEncodingBit = 40,
Feature_HasUnpackedD16VMemBit = 94,
Feature_HasPackedD16VMemBit = 95,
Feature_HasUnrestrictedSOffsetBit = 93,
Feature_D16PreservesUnusedBitsBit = 75,
Feature_LDSRequiresM0InitBit = 107,
Feature_NotLDSRequiresM0InitBit = 108,
Feature_HasExportInstsBit = 7,
Feature_HasLDSFPAtomicAddF32Bit = 113,
Feature_HasAddNoCarryInstsBit = 55,
Feature_NotHasAddNoCarryInstsBit = 105,
Feature_HasXNACKEnabledBit = 70,
Feature_Has16BitInstsBit = 9,
Feature_HasTrue16BitInstsBit = 14,
Feature_NotHasTrue16BitInstsBit = 10,
Feature_UseRealTrue16InstsBit = 11,
Feature_UseFakeTrue16InstsBit = 12,
Feature_HasVOP3PInstsBit = 64,
Feature_HasMed3_16Bit = 106,
Feature_HasMinMaxDenormModesBit = 99,
Feature_NotHasMinMaxDenormModesBit = 101,
Feature_HasPackedFP32OpsBit = 34,
Feature_HasImageInstsBit = 6,
Feature_HasIntClampBit = 61,
Feature_HasMadMixInstsBit = 65,
Feature_HasScalarStoresBit = 37,
Feature_has16BankLDSBit = 42,
Feature_has32BankLDSBit = 23,
Feature_HasFmaMixInstsBit = 67,
Feature_HasDLInstsBit = 16,
Feature_HasFmacF64InstBit = 104,
Feature_HasDot1InstsBit = 29,
Feature_HasDot2InstsBit = 26,
Feature_HasDot3InstsBit = 60,
Feature_HasDot4InstsBit = 59,
Feature_HasDot5InstsBit = 57,
Feature_HasDot6InstsBit = 58,
Feature_HasDot7InstsBit = 28,
Feature_HasDot8InstsBit = 68,
Feature_HasDot9InstsBit = 24,
Feature_HasDot10InstsBit = 27,
Feature_HasDot11InstsBit = 69,
Feature_HasGetWaveIdInstBit = 39,
Feature_HasMAIInstsBit = 30,
Feature_HasSMemRealTimeBit = 38,
Feature_HasSMemTimeInstBit = 35,
Feature_HasShaderCyclesRegisterBit = 72,
Feature_HasShaderCyclesHiLoRegistersBit = 45,
Feature_HasFP8ConversionInstsBit = 51,
Feature_HasMadMacF32InstsBit = 17,
Feature_HasFmaLegacy32Bit = 18,
Feature_HasAtomicDsPkAdd16InstsBit = 112,
Feature_HasAtomicFlatPkAdd16InstsBit = 91,
Feature_HasAtomicFaddRtnInstsBit = 86,
Feature_HasAtomicFaddNoRtnInstsBit = 84,
Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit = 85,
Feature_HasAtomicBufferGlobalPkAddF16InstsBit = 87,
Feature_HasAtomicGlobalPkAddBF16InstBit = 92,
Feature_HasAtomicBufferPkAddBF16InstBit = 96,
Feature_HasFlatAtomicFaddF32InstBit = 90,
Feature_EnableFlatScratchBit = 76,
Feature_DisableFlatScratchBit = 97,
Feature_HasUnalignedAccessModeBit = 109,
Feature_HasMADIntraFwdBugBit = 63,
Feature_HasNotMADIntraFwdBugBit = 62,
Feature_HasSALUFloatInstsBit = 2,
Feature_HasPseudoScalarTransBit = 25,
Feature_HasGDSBit = 110,
Feature_HasCvtFP8VOP1BugBit = 48,
Feature_HasNoCvtFP8VOP1BugBit = 50,
Feature_HasAtomicCSubNoRtnInstsBit = 81,
Feature_FP16DenormalsBit = 103,
Feature_FP64DenormalsBit = 102,
Feature_NoFP32DenormalsBit = 66,
Feature_isWave32Bit = 43,
Feature_isWave64Bit = 44,
};
// Computes the subtarget-dependent predicate bits: each `if` below sets one
// Feature_*Bit when the corresponding subtarget query holds. Generation-based
// predicates compare AMDGPUSubtarget::getGeneration(); all others forward to
// a Subtarget accessor. Note some bits are deliberately set from compound or
// negated conditions (e.g. the NoRtn/PackedF16 OR, and the paired
// hasX()/!hasX() bits).
PredicateBitset AMDGPUInstructionSelector::
computeAvailableModuleFeatures(const AMDGPUSubtarget *Subtarget) const {
PredicateBitset Features{};
if (false)
Features.set(Feature_FalsePredicateBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS)
Features.set(Feature_isGFX6Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS)
Features.set(Feature_isGFX6GFX7Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
Features.set(Feature_isGFX6GFX7GFX10Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS)
Features.set(Feature_isGFX7OnlyBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
Features.set(Feature_isGFX7GFX8GFX9Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
Features.set(Feature_isGFX6GFX7GFX8GFX9Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
Features.set(Feature_isGFX6GFX7GFX8GFX9GFX10Bit);
if (Subtarget->getGeneration() <= AMDGPUSubtarget::GFX11)
Features.set(Feature_isNotGFX12PlusBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX11)
Features.set(Feature_isGFX8GFX9GFX10GFX11Bit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::SEA_ISLANDS)
Features.set(Feature_isGFX7PlusBit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::VOLCANIC_ISLANDS)
Features.set(Feature_isGFX8PlusBit);
if (Subtarget->getGeneration() ==AMDGPUSubtarget::VOLCANIC_ISLANDS)
Features.set(Feature_isGFX8OnlyBit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX9)
Features.set(Feature_isGFX9PlusBit);
if (Subtarget->getGeneration() < AMDGPUSubtarget::GFX9)
Features.set(Feature_isNotGFX9PlusBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
Features.set(Feature_isGFX9OnlyBit);
if (Subtarget->hasGFX90AInsts())
Features.set(Feature_isGFX90APlusBit);
if (!Subtarget->hasGFX90AInsts())
Features.set(Feature_isNotGFX90APlusBit);
if (Subtarget->hasMAIInsts() && !Subtarget->hasGFX940Insts())
Features.set(Feature_isGFX908orGFX90ABit);
if (Subtarget->hasGFX940Insts())
Features.set(Feature_isGFX940PlusBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
Features.set(Feature_isGFX8GFX9Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
Features.set(Feature_isGFX10OnlyBit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX10)
Features.set(Feature_isGFX10PlusBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
Features.set(Feature_isGFX9GFX10Bit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX11)
Features.set(Feature_isGFX11OnlyBit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX11)
Features.set(Feature_isGFX11PlusBit);
if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX12)
Features.set(Feature_isGFX12OnlyBit);
if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX12)
Features.set(Feature_isGFX12PlusBit);
if (Subtarget->hasFlatAddressSpace())
Features.set(Feature_HasFlatAddressSpaceBit);
if (Subtarget->hasFlatBufferGlobalAtomicFaddF64Inst())
Features.set(Feature_HasFlatBufferGlobalAtomicFaddF64InstBit);
if (Subtarget->hasAtomicFMinFMaxF32GlobalInsts())
Features.set(Feature_HasAtomicFMinFMaxF32GlobalInstsBit);
if (Subtarget->hasAtomicFMinFMaxF64GlobalInsts())
Features.set(Feature_HasAtomicFMinFMaxF64GlobalInstsBit);
if (Subtarget->hasAtomicFMinFMaxF32FlatInsts())
Features.set(Feature_HasAtomicFMinFMaxF32FlatInstsBit);
if (Subtarget->hasAtomicFMinFMaxF64FlatInsts())
Features.set(Feature_HasAtomicFMinFMaxF64FlatInstsBit);
if (Subtarget->hasLdsAtomicAddF64())
Features.set(Feature_HasLdsAtomicAddF64Bit);
if (Subtarget->hasFlatGlobalInsts())
Features.set(Feature_HasFlatGlobalInstsBit);
if (Subtarget->hasFlatScratchInsts())
Features.set(Feature_HasFlatScratchInstsBit);
if (Subtarget->hasD16LoadStore())
Features.set(Feature_HasD16LoadStoreBit);
if (Subtarget->hasFlatScratchSVSMode())
Features.set(Feature_HasFlatScratchSVSModeBit);
if (Subtarget->hasGFX10_BEncoding())
Features.set(Feature_HasGFX10_BEncodingBit);
if (Subtarget->hasUnpackedD16VMem())
Features.set(Feature_HasUnpackedD16VMemBit);
if (!Subtarget->hasUnpackedD16VMem())
Features.set(Feature_HasPackedD16VMemBit);
if (!Subtarget->hasRestrictedSOffset())
Features.set(Feature_HasUnrestrictedSOffsetBit);
if (Subtarget->d16PreservesUnusedBits())
Features.set(Feature_D16PreservesUnusedBitsBit);
if (Subtarget->ldsRequiresM0Init())
Features.set(Feature_LDSRequiresM0InitBit);
if (!Subtarget->ldsRequiresM0Init())
Features.set(Feature_NotLDSRequiresM0InitBit);
if (Subtarget->hasExportInsts())
Features.set(Feature_HasExportInstsBit);
if (Subtarget->hasLDSFPAtomicAddF32())
Features.set(Feature_HasLDSFPAtomicAddF32Bit);
if (Subtarget->hasAddNoCarry())
Features.set(Feature_HasAddNoCarryInstsBit);
if (!Subtarget->hasAddNoCarry())
Features.set(Feature_NotHasAddNoCarryInstsBit);
if (Subtarget->isXNACKEnabled())
Features.set(Feature_HasXNACKEnabledBit);
if (Subtarget->has16BitInsts())
Features.set(Feature_Has16BitInstsBit);
if (Subtarget->hasTrue16BitInsts())
Features.set(Feature_HasTrue16BitInstsBit);
if (!Subtarget->hasTrue16BitInsts())
Features.set(Feature_NotHasTrue16BitInstsBit);
if (Subtarget->useRealTrue16Insts())
Features.set(Feature_UseRealTrue16InstsBit);
if (Subtarget->hasTrue16BitInsts() && !Subtarget->useRealTrue16Insts())
Features.set(Feature_UseFakeTrue16InstsBit);
if (Subtarget->hasVOP3PInsts())
Features.set(Feature_HasVOP3PInstsBit);
if (Subtarget->hasMed3_16())
Features.set(Feature_HasMed3_16Bit);
if (Subtarget->supportsMinMaxDenormModes())
Features.set(Feature_HasMinMaxDenormModesBit);
if (!Subtarget->supportsMinMaxDenormModes())
Features.set(Feature_NotHasMinMaxDenormModesBit);
if (Subtarget->hasPackedFP32Ops())
Features.set(Feature_HasPackedFP32OpsBit);
if (Subtarget->hasImageInsts())
Features.set(Feature_HasImageInstsBit);
if (Subtarget->hasIntClamp())
Features.set(Feature_HasIntClampBit);
if (Subtarget->hasMadMixInsts())
Features.set(Feature_HasMadMixInstsBit);
if (Subtarget->hasScalarStores())
Features.set(Feature_HasScalarStoresBit);
if (Subtarget->getLDSBankCount() == 16)
Features.set(Feature_has16BankLDSBit);
if (Subtarget->getLDSBankCount() == 32)
Features.set(Feature_has32BankLDSBit);
if (Subtarget->hasFmaMixInsts())
Features.set(Feature_HasFmaMixInstsBit);
if (Subtarget->hasDLInsts())
Features.set(Feature_HasDLInstsBit);
if (Subtarget->hasFmacF64Inst())
Features.set(Feature_HasFmacF64InstBit);
if (Subtarget->hasDot1Insts())
Features.set(Feature_HasDot1InstsBit);
if (Subtarget->hasDot2Insts())
Features.set(Feature_HasDot2InstsBit);
if (Subtarget->hasDot3Insts())
Features.set(Feature_HasDot3InstsBit);
if (Subtarget->hasDot4Insts())
Features.set(Feature_HasDot4InstsBit);
if (Subtarget->hasDot5Insts())
Features.set(Feature_HasDot5InstsBit);
if (Subtarget->hasDot6Insts())
Features.set(Feature_HasDot6InstsBit);
if (Subtarget->hasDot7Insts())
Features.set(Feature_HasDot7InstsBit);
if (Subtarget->hasDot8Insts())
Features.set(Feature_HasDot8InstsBit);
if (Subtarget->hasDot9Insts())
Features.set(Feature_HasDot9InstsBit);
if (Subtarget->hasDot10Insts())
Features.set(Feature_HasDot10InstsBit);
if (Subtarget->hasDot11Insts())
Features.set(Feature_HasDot11InstsBit);
if (Subtarget->hasGetWaveIdInst())
Features.set(Feature_HasGetWaveIdInstBit);
if (Subtarget->hasMAIInsts())
Features.set(Feature_HasMAIInstsBit);
if (Subtarget->hasSMemRealTime())
Features.set(Feature_HasSMemRealTimeBit);
if (Subtarget->hasSMemTimeInst())
Features.set(Feature_HasSMemTimeInstBit);
if (Subtarget->hasShaderCyclesRegister())
Features.set(Feature_HasShaderCyclesRegisterBit);
if (Subtarget->hasShaderCyclesHiLoRegisters())
Features.set(Feature_HasShaderCyclesHiLoRegistersBit);
if (Subtarget->hasFP8ConversionInsts())
Features.set(Feature_HasFP8ConversionInstsBit);
if (Subtarget->hasMadMacF32Insts())
Features.set(Feature_HasMadMacF32InstsBit);
// Note: HasFmaLegacy32 is keyed off hasGFX10_3Insts() by the generator.
if (Subtarget->hasGFX10_3Insts())
Features.set(Feature_HasFmaLegacy32Bit);
if (Subtarget->hasAtomicDsPkAdd16Insts())
Features.set(Feature_HasAtomicDsPkAdd16InstsBit);
if (Subtarget->hasAtomicFlatPkAdd16Insts())
Features.set(Feature_HasAtomicFlatPkAdd16InstsBit);
if (Subtarget->hasAtomicFaddRtnInsts())
Features.set(Feature_HasAtomicFaddRtnInstsBit);
if (Subtarget->hasAtomicFaddNoRtnInsts())
Features.set(Feature_HasAtomicFaddNoRtnInstsBit);
// The NoRtn form is also usable when the Rtn-capable feature is present.
if (Subtarget->hasAtomicBufferGlobalPkAddF16NoRtnInsts() || Subtarget->hasAtomicBufferGlobalPkAddF16Insts())
Features.set(Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit);
if (Subtarget->hasAtomicBufferGlobalPkAddF16Insts())
Features.set(Feature_HasAtomicBufferGlobalPkAddF16InstsBit);
if (Subtarget->hasAtomicGlobalPkAddBF16Inst())
Features.set(Feature_HasAtomicGlobalPkAddBF16InstBit);
if (Subtarget->hasAtomicBufferPkAddBF16Inst())
Features.set(Feature_HasAtomicBufferPkAddBF16InstBit);
if (Subtarget->hasFlatAtomicFaddF32Inst())
Features.set(Feature_HasFlatAtomicFaddF32InstBit);
if (Subtarget->enableFlatScratch())
Features.set(Feature_EnableFlatScratchBit);
if (!Subtarget->enableFlatScratch())
Features.set(Feature_DisableFlatScratchBit);
if (Subtarget->hasUnalignedAccessMode())
Features.set(Feature_HasUnalignedAccessModeBit);
if (Subtarget->hasMADIntraFwdBug())
Features.set(Feature_HasMADIntraFwdBugBit);
if (!Subtarget->hasMADIntraFwdBug())
Features.set(Feature_HasNotMADIntraFwdBugBit);
if (Subtarget->hasSALUFloatInsts())
Features.set(Feature_HasSALUFloatInstsBit);
if (Subtarget->hasPseudoScalarTrans())
Features.set(Feature_HasPseudoScalarTransBit);
if (Subtarget->hasGDS())
Features.set(Feature_HasGDSBit);
if (Subtarget->hasCvtFP8VOP1Bug())
Features.set(Feature_HasCvtFP8VOP1BugBit);
if (!Subtarget->hasCvtFP8VOP1Bug())
Features.set(Feature_HasNoCvtFP8VOP1BugBit);
if (Subtarget->hasAtomicCSubNoRtnInsts())
Features.set(Feature_HasAtomicCSubNoRtnInstsBit);
if (Subtarget->getWavefrontSize() == 32)
Features.set(Feature_isWave32Bit);
if (Subtarget->getWavefrontSize() == 64)
Features.set(Feature_isWave64Bit);
return Features;
}
// Refresh the per-function predicate bits for the function about to be
// selected. Module-level feature bits are not touched here.
void AMDGPUInstructionSelector::setupGeneratedPerFunctionState(MachineFunction &MF) {
  const AMDGPUSubtarget *ST = (const AMDGPUSubtarget *)&MF.getSubtarget();
  AvailableFunctionFeatures = computeAvailableFunctionFeatures(ST, &MF);
}
// Computes the function-dependent predicate bits. All three predicates read
// the function's FP denormal mode from SIMachineFunctionInfo; the FP16 and
// FP64 checks both consult the combined FP64FP16Denormals field.
PredicateBitset AMDGPUInstructionSelector::
computeAvailableFunctionFeatures(const AMDGPUSubtarget *Subtarget, const MachineFunction *MF) const {
PredicateBitset Features{};
const auto &Mode = MF->getInfo<SIMachineFunctionInfo>()->getMode();
if (Mode.FP64FP16Denormals != DenormalMode::getPreserveSign())
Features.set(Feature_FP16DenormalsBit);
if (Mode.FP64FP16Denormals != DenormalMode::getPreserveSign())
Features.set(Feature_FP64DenormalsBit);
if (Mode.FP32Denormals == DenormalMode::getPreserveSign())
Features.set(Feature_NoFP32DenormalsBit);
return Features;
}
// Identifiers for the distinct feature-set combinations referenced by the
// generated match table. Each GIFBS_* value indexes the FeatureBitsets array
// below, so the enumerator order here must match its initializer order
// exactly. Entries are grouped by the number of features combined (1, 2, 3,
// then 4).
enum {
GIFBS_Invalid,
GIFBS_DisableFlatScratch,
GIFBS_Has16BitInsts,
GIFBS_HasAddNoCarryInsts,
GIFBS_HasAtomicBufferGlobalPkAddF16Insts,
GIFBS_HasAtomicBufferGlobalPkAddF16NoRtnInsts,
GIFBS_HasAtomicBufferPkAddBF16Inst,
GIFBS_HasAtomicCSubNoRtnInsts,
GIFBS_HasAtomicFMinFMaxF32GlobalInsts,
GIFBS_HasAtomicFMinFMaxF64GlobalInsts,
GIFBS_HasAtomicFaddNoRtnInsts,
GIFBS_HasAtomicFaddRtnInsts,
GIFBS_HasAtomicFlatPkAdd16Insts,
GIFBS_HasAtomicGlobalPkAddBF16Inst,
GIFBS_HasD16LoadStore,
GIFBS_HasDLInsts,
GIFBS_HasDot10Insts,
GIFBS_HasDot1Insts,
GIFBS_HasDot2Insts,
GIFBS_HasDot3Insts,
GIFBS_HasDot4Insts,
GIFBS_HasDot5Insts,
GIFBS_HasDot6Insts,
GIFBS_HasDot7Insts,
GIFBS_HasDot8Insts,
GIFBS_HasDot9Insts,
GIFBS_HasFlatAddressSpace,
GIFBS_HasFlatAtomicFaddF32Inst,
GIFBS_HasFlatBufferGlobalAtomicFaddF64Inst,
GIFBS_HasFlatGlobalInsts,
GIFBS_HasFmaLegacy32,
GIFBS_HasFmaMixInsts,
GIFBS_HasFmacF64Inst,
GIFBS_HasGDS,
GIFBS_HasGFX10_BEncoding,
GIFBS_HasGetWaveIdInst,
GIFBS_HasIntClamp,
GIFBS_HasLdsAtomicAddF64,
GIFBS_HasMAIInsts,
GIFBS_HasMadMacF32Insts,
GIFBS_HasMed3_16,
GIFBS_HasMinMaxDenormModes,
GIFBS_HasPackedD16VMem,
GIFBS_HasPackedFP32Ops,
GIFBS_HasPseudoScalarTrans,
GIFBS_HasSALUFloatInsts,
GIFBS_HasSMemRealTime,
GIFBS_HasSMemTimeInst,
GIFBS_HasShaderCyclesHiLoRegisters,
GIFBS_HasShaderCyclesRegister,
GIFBS_HasTrue16BitInsts,
GIFBS_HasUnpackedD16VMem,
GIFBS_HasUnrestrictedSOffset,
GIFBS_HasVOP3PInsts,
GIFBS_HasXNACKEnabled,
GIFBS_LDSRequiresM0Init,
GIFBS_NotHasAddNoCarryInsts,
GIFBS_NotHasTrue16BitInsts,
GIFBS_NotLDSRequiresM0Init,
GIFBS_UseFakeTrue16Insts,
GIFBS_UseRealTrue16Insts,
GIFBS_isGFX10Only,
GIFBS_isGFX10Plus,
GIFBS_isGFX11Only,
GIFBS_isGFX11Plus,
GIFBS_isGFX12Only,
GIFBS_isGFX12Plus,
GIFBS_isGFX6,
GIFBS_isGFX6GFX7,
GIFBS_isGFX6GFX7GFX8GFX9,
GIFBS_isGFX7GFX8GFX9,
GIFBS_isGFX7Only,
GIFBS_isGFX7Plus,
GIFBS_isGFX8GFX9,
GIFBS_isGFX8GFX9GFX10GFX11,
GIFBS_isGFX8Plus,
GIFBS_isGFX908orGFX90A,
GIFBS_isGFX90APlus,
GIFBS_isGFX940Plus,
GIFBS_isGFX9GFX10,
GIFBS_isGFX9Only,
GIFBS_isGFX9Plus,
GIFBS_isNotGFX12Plus,
GIFBS_isNotGFX90APlus,
GIFBS_isNotGFX9Plus,
GIFBS_isWave32,
GIFBS_isWave64,
GIFBS_DisableFlatScratch_HasD16LoadStore,
GIFBS_DisableFlatScratch_HasUnrestrictedSOffset,
GIFBS_EnableFlatScratch_HasFlatScratchInsts,
GIFBS_FP16Denormals_NotHasMinMaxDenormModes,
GIFBS_FalsePredicate_NotHasMinMaxDenormModes,
GIFBS_Has16BitInsts_HasIntClamp,
GIFBS_Has16BitInsts_NotHasTrue16BitInsts,
GIFBS_Has16BitInsts_UseFakeTrue16Insts,
GIFBS_Has16BitInsts_UseRealTrue16Insts,
GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9,
GIFBS_Has16BitInsts_isGFX8GFX9,
GIFBS_Has16BitInsts_isGFX8Only,
GIFBS_Has16BitInsts_isNotGFX90APlus,
GIFBS_HasAddNoCarryInsts_HasIntClamp,
GIFBS_HasAtomicBufferGlobalPkAddF16Insts_HasUnrestrictedSOffset,
GIFBS_HasAtomicBufferGlobalPkAddF16NoRtnInsts_HasUnrestrictedSOffset,
GIFBS_HasAtomicBufferPkAddBF16Inst_HasUnrestrictedSOffset,
GIFBS_HasAtomicCSubNoRtnInsts_isGFX12Plus,
GIFBS_HasAtomicDsPkAdd16Insts_HasGDS,
GIFBS_HasAtomicDsPkAdd16Insts_LDSRequiresM0Init,
GIFBS_HasAtomicDsPkAdd16Insts_NotLDSRequiresM0Init,
GIFBS_HasAtomicFMinFMaxF32FlatInsts_HasFlatAddressSpace,
GIFBS_HasAtomicFMinFMaxF32FlatInsts_HasFlatGlobalInsts,
GIFBS_HasAtomicFMinFMaxF32GlobalInsts_HasFlatGlobalInsts,
GIFBS_HasAtomicFMinFMaxF32GlobalInsts_HasUnrestrictedSOffset,
GIFBS_HasAtomicFMinFMaxF64FlatInsts_HasFlatAddressSpace,
GIFBS_HasAtomicFMinFMaxF64FlatInsts_HasFlatGlobalInsts,
GIFBS_HasAtomicFMinFMaxF64GlobalInsts_HasFlatGlobalInsts,
GIFBS_HasAtomicFMinFMaxF64GlobalInsts_HasUnrestrictedSOffset,
GIFBS_HasAtomicFaddNoRtnInsts_HasUnrestrictedSOffset,
GIFBS_HasAtomicFaddRtnInsts_HasUnrestrictedSOffset,
GIFBS_HasCvtFP8VOP1Bug_isGFX9Only,
GIFBS_HasDot11Insts_isGFX12Plus,
GIFBS_HasExportInsts_isGFX12Plus,
GIFBS_HasFP8ConversionInsts_isGFX12Plus,
GIFBS_HasFP8ConversionInsts_isGFX940Plus,
GIFBS_HasFP8ConversionInsts_isGFX9Plus,
GIFBS_HasFlatAddressSpace_isGFX12Plus,
GIFBS_HasFlatBufferGlobalAtomicFaddF64Inst_HasUnrestrictedSOffset,
GIFBS_HasFlatGlobalInsts_isGFX12Plus,
GIFBS_HasGDS_HasLDSFPAtomicAddF32,
GIFBS_HasGDS_isGFX11Plus,
GIFBS_HasGDS_isGFX6GFX7GFX8GFX9GFX10,
GIFBS_HasImageInsts_isGFX12Plus,
GIFBS_HasLDSFPAtomicAddF32_LDSRequiresM0Init,
GIFBS_HasLDSFPAtomicAddF32_NotLDSRequiresM0Init,
GIFBS_HasMADIntraFwdBug_isGFX11Only,
GIFBS_HasMadMacF32Insts_NoFP32Denormals,
GIFBS_HasMadMixInsts_NoFP32Denormals,
GIFBS_HasMinMaxDenormModes_isGFX12Plus,
GIFBS_HasMinMaxDenormModes_isNotGFX12Plus,
GIFBS_HasNoCvtFP8VOP1Bug_isGFX9Only,
GIFBS_HasNotMADIntraFwdBug_isGFX9Plus,
GIFBS_HasPackedD16VMem_HasUnrestrictedSOffset,
GIFBS_HasScalarStores_isGFX8Plus,
GIFBS_HasTrue16BitInsts_isGFX10Plus,
GIFBS_HasUnalignedAccessMode_LDSRequiresM0Init,
GIFBS_HasUnalignedAccessMode_NotLDSRequiresM0Init,
GIFBS_HasXNACKEnabled_isGFX7Only,
GIFBS_HasXNACKEnabled_isGFX9Plus,
GIFBS_HasXNACKEnabled_isNotGFX9Plus,
GIFBS_LDSRequiresM0Init_isGFX6GFX7GFX8GFX9GFX10,
GIFBS_LDSRequiresM0Init_isGFX7Plus,
GIFBS_NotHasTrue16BitInsts_isGFX10Plus,
GIFBS_NotHasTrue16BitInsts_isGFX8Plus,
GIFBS_NotLDSRequiresM0Init_isGFX6GFX7GFX8GFX9GFX10,
GIFBS_NotLDSRequiresM0Init_isGFX7Plus,
GIFBS_UseFakeTrue16Insts_isGFX8Plus,
GIFBS_has16BankLDS_isNotGFX90APlus,
GIFBS_has32BankLDS_isNotGFX90APlus,
GIFBS_isGFX11Only_isWave32,
GIFBS_isGFX11Only_isWave64,
GIFBS_isGFX12Plus_isWave32,
GIFBS_isGFX12Plus_isWave64,
GIFBS_DisableFlatScratch_HasD16LoadStore_HasUnrestrictedSOffset,
GIFBS_EnableFlatScratch_HasD16LoadStore_HasFlatScratchInsts,
GIFBS_EnableFlatScratch_HasFlatScratchInsts_HasFlatScratchSVSMode,
GIFBS_FP64Denormals_NotHasMinMaxDenormModes_isGFX12Plus,
GIFBS_FP64Denormals_NotHasMinMaxDenormModes_isNotGFX12Plus,
GIFBS_Has16BitInsts_HasMinMaxDenormModes_HasTrue16BitInsts,
GIFBS_Has16BitInsts_HasMinMaxDenormModes_NotHasTrue16BitInsts,
GIFBS_Has16BitInsts_has32BankLDS_isNotGFX90APlus,
GIFBS_HasMadMacF32Insts_NoFP32Denormals_isGFX6GFX7GFX10,
GIFBS_EnableFlatScratch_HasD16LoadStore_HasFlatScratchInsts_HasFlatScratchSVSMode,
GIFBS_FP16Denormals_Has16BitInsts_HasTrue16BitInsts_NotHasMinMaxDenormModes,
GIFBS_FP16Denormals_Has16BitInsts_NotHasMinMaxDenormModes_NotHasTrue16BitInsts,
};
// One PredicateBitset per GIFBS_* identifier above; initializer order must
// stay in sync with that enum. Entry 0 ({}) is GIFBS_Invalid / "no features
// required"; subsequent entries list the Feature_*Bit positions set in each
// combination.
constexpr static PredicateBitset FeatureBitsets[] {
{},
{Feature_DisableFlatScratchBit, },
{Feature_Has16BitInstsBit, },
{Feature_HasAddNoCarryInstsBit, },
{Feature_HasAtomicBufferGlobalPkAddF16InstsBit, },
{Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit, },
{Feature_HasAtomicBufferPkAddBF16InstBit, },
{Feature_HasAtomicCSubNoRtnInstsBit, },
{Feature_HasAtomicFMinFMaxF32GlobalInstsBit, },
{Feature_HasAtomicFMinFMaxF64GlobalInstsBit, },
{Feature_HasAtomicFaddNoRtnInstsBit, },
{Feature_HasAtomicFaddRtnInstsBit, },
{Feature_HasAtomicFlatPkAdd16InstsBit, },
{Feature_HasAtomicGlobalPkAddBF16InstBit, },
{Feature_HasD16LoadStoreBit, },
{Feature_HasDLInstsBit, },
{Feature_HasDot10InstsBit, },
{Feature_HasDot1InstsBit, },
{Feature_HasDot2InstsBit, },
{Feature_HasDot3InstsBit, },
{Feature_HasDot4InstsBit, },
{Feature_HasDot5InstsBit, },
{Feature_HasDot6InstsBit, },
{Feature_HasDot7InstsBit, },
{Feature_HasDot8InstsBit, },
{Feature_HasDot9InstsBit, },
{Feature_HasFlatAddressSpaceBit, },
{Feature_HasFlatAtomicFaddF32InstBit, },
{Feature_HasFlatBufferGlobalAtomicFaddF64InstBit, },
{Feature_HasFlatGlobalInstsBit, },
{Feature_HasFmaLegacy32Bit, },
{Feature_HasFmaMixInstsBit, },
{Feature_HasFmacF64InstBit, },
{Feature_HasGDSBit, },
{Feature_HasGFX10_BEncodingBit, },
{Feature_HasGetWaveIdInstBit, },
{Feature_HasIntClampBit, },
{Feature_HasLdsAtomicAddF64Bit, },
{Feature_HasMAIInstsBit, },
{Feature_HasMadMacF32InstsBit, },
{Feature_HasMed3_16Bit, },
{Feature_HasMinMaxDenormModesBit, },
{Feature_HasPackedD16VMemBit, },
{Feature_HasPackedFP32OpsBit, },
{Feature_HasPseudoScalarTransBit, },
{Feature_HasSALUFloatInstsBit, },
{Feature_HasSMemRealTimeBit, },
{Feature_HasSMemTimeInstBit, },
{Feature_HasShaderCyclesHiLoRegistersBit, },
{Feature_HasShaderCyclesRegisterBit, },
{Feature_HasTrue16BitInstsBit, },
{Feature_HasUnpackedD16VMemBit, },
{Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasVOP3PInstsBit, },
{Feature_HasXNACKEnabledBit, },
{Feature_LDSRequiresM0InitBit, },
{Feature_NotHasAddNoCarryInstsBit, },
{Feature_NotHasTrue16BitInstsBit, },
{Feature_NotLDSRequiresM0InitBit, },
{Feature_UseFakeTrue16InstsBit, },
{Feature_UseRealTrue16InstsBit, },
{Feature_isGFX10OnlyBit, },
{Feature_isGFX10PlusBit, },
{Feature_isGFX11OnlyBit, },
{Feature_isGFX11PlusBit, },
{Feature_isGFX12OnlyBit, },
{Feature_isGFX12PlusBit, },
{Feature_isGFX6Bit, },
{Feature_isGFX6GFX7Bit, },
{Feature_isGFX6GFX7GFX8GFX9Bit, },
{Feature_isGFX7GFX8GFX9Bit, },
{Feature_isGFX7OnlyBit, },
{Feature_isGFX7PlusBit, },
{Feature_isGFX8GFX9Bit, },
{Feature_isGFX8GFX9GFX10GFX11Bit, },
{Feature_isGFX8PlusBit, },
{Feature_isGFX908orGFX90ABit, },
{Feature_isGFX90APlusBit, },
{Feature_isGFX940PlusBit, },
{Feature_isGFX9GFX10Bit, },
{Feature_isGFX9OnlyBit, },
{Feature_isGFX9PlusBit, },
{Feature_isNotGFX12PlusBit, },
{Feature_isNotGFX90APlusBit, },
{Feature_isNotGFX9PlusBit, },
{Feature_isWave32Bit, },
{Feature_isWave64Bit, },
{Feature_DisableFlatScratchBit, Feature_HasD16LoadStoreBit, },
{Feature_DisableFlatScratchBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_EnableFlatScratchBit, Feature_HasFlatScratchInstsBit, },
{Feature_FP16DenormalsBit, Feature_NotHasMinMaxDenormModesBit, },
{Feature_FalsePredicateBit, Feature_NotHasMinMaxDenormModesBit, },
{Feature_Has16BitInstsBit, Feature_HasIntClampBit, },
{Feature_Has16BitInstsBit, Feature_NotHasTrue16BitInstsBit, },
{Feature_Has16BitInstsBit, Feature_UseFakeTrue16InstsBit, },
{Feature_Has16BitInstsBit, Feature_UseRealTrue16InstsBit, },
{Feature_Has16BitInstsBit, Feature_isGFX6GFX7GFX8GFX9Bit, },
{Feature_Has16BitInstsBit, Feature_isGFX8GFX9Bit, },
{Feature_Has16BitInstsBit, Feature_isGFX8OnlyBit, },
{Feature_Has16BitInstsBit, Feature_isNotGFX90APlusBit, },
{Feature_HasAddNoCarryInstsBit, Feature_HasIntClampBit, },
{Feature_HasAtomicBufferGlobalPkAddF16InstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicBufferPkAddBF16InstBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicCSubNoRtnInstsBit, Feature_isGFX12PlusBit, },
{Feature_HasAtomicDsPkAdd16InstsBit, Feature_HasGDSBit, },
{Feature_HasAtomicDsPkAdd16InstsBit, Feature_LDSRequiresM0InitBit, },
{Feature_HasAtomicDsPkAdd16InstsBit, Feature_NotLDSRequiresM0InitBit, },
{Feature_HasAtomicFMinFMaxF32FlatInstsBit, Feature_HasFlatAddressSpaceBit, },
{Feature_HasAtomicFMinFMaxF32FlatInstsBit, Feature_HasFlatGlobalInstsBit, },
{Feature_HasAtomicFMinFMaxF32GlobalInstsBit, Feature_HasFlatGlobalInstsBit, },
{Feature_HasAtomicFMinFMaxF32GlobalInstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicFMinFMaxF64FlatInstsBit, Feature_HasFlatAddressSpaceBit, },
{Feature_HasAtomicFMinFMaxF64FlatInstsBit, Feature_HasFlatGlobalInstsBit, },
{Feature_HasAtomicFMinFMaxF64GlobalInstsBit, Feature_HasFlatGlobalInstsBit, },
{Feature_HasAtomicFMinFMaxF64GlobalInstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicFaddNoRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasAtomicFaddRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasCvtFP8VOP1BugBit, Feature_isGFX9OnlyBit, },
{Feature_HasDot11InstsBit, Feature_isGFX12PlusBit, },
{Feature_HasExportInstsBit, Feature_isGFX12PlusBit, },
{Feature_HasFP8ConversionInstsBit, Feature_isGFX12PlusBit, },
{Feature_HasFP8ConversionInstsBit, Feature_isGFX940PlusBit, },
{Feature_HasFP8ConversionInstsBit, Feature_isGFX9PlusBit, },
{Feature_HasFlatAddressSpaceBit, Feature_isGFX12PlusBit, },
{Feature_HasFlatBufferGlobalAtomicFaddF64InstBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasFlatGlobalInstsBit, Feature_isGFX12PlusBit, },
{Feature_HasGDSBit, Feature_HasLDSFPAtomicAddF32Bit, },
{Feature_HasGDSBit, Feature_isGFX11PlusBit, },
{Feature_HasGDSBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
{Feature_HasImageInstsBit, Feature_isGFX12PlusBit, },
{Feature_HasLDSFPAtomicAddF32Bit, Feature_LDSRequiresM0InitBit, },
{Feature_HasLDSFPAtomicAddF32Bit, Feature_NotLDSRequiresM0InitBit, },
{Feature_HasMADIntraFwdBugBit, Feature_isGFX11OnlyBit, },
{Feature_HasMadMacF32InstsBit, Feature_NoFP32DenormalsBit, },
{Feature_HasMadMixInstsBit, Feature_NoFP32DenormalsBit, },
{Feature_HasMinMaxDenormModesBit, Feature_isGFX12PlusBit, },
{Feature_HasMinMaxDenormModesBit, Feature_isNotGFX12PlusBit, },
{Feature_HasNoCvtFP8VOP1BugBit, Feature_isGFX9OnlyBit, },
{Feature_HasNotMADIntraFwdBugBit, Feature_isGFX9PlusBit, },
{Feature_HasPackedD16VMemBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_HasScalarStoresBit, Feature_isGFX8PlusBit, },
{Feature_HasTrue16BitInstsBit, Feature_isGFX10PlusBit, },
{Feature_HasUnalignedAccessModeBit, Feature_LDSRequiresM0InitBit, },
{Feature_HasUnalignedAccessModeBit, Feature_NotLDSRequiresM0InitBit, },
{Feature_HasXNACKEnabledBit, Feature_isGFX7OnlyBit, },
{Feature_HasXNACKEnabledBit, Feature_isGFX9PlusBit, },
{Feature_HasXNACKEnabledBit, Feature_isNotGFX9PlusBit, },
{Feature_LDSRequiresM0InitBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
{Feature_LDSRequiresM0InitBit, Feature_isGFX7PlusBit, },
{Feature_NotHasTrue16BitInstsBit, Feature_isGFX10PlusBit, },
{Feature_NotHasTrue16BitInstsBit, Feature_isGFX8PlusBit, },
{Feature_NotLDSRequiresM0InitBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
{Feature_NotLDSRequiresM0InitBit, Feature_isGFX7PlusBit, },
{Feature_UseFakeTrue16InstsBit, Feature_isGFX8PlusBit, },
{Feature_has16BankLDSBit, Feature_isNotGFX90APlusBit, },
{Feature_has32BankLDSBit, Feature_isNotGFX90APlusBit, },
{Feature_isGFX11OnlyBit, Feature_isWave32Bit, },
{Feature_isGFX11OnlyBit, Feature_isWave64Bit, },
{Feature_isGFX12PlusBit, Feature_isWave32Bit, },
{Feature_isGFX12PlusBit, Feature_isWave64Bit, },
{Feature_DisableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasUnrestrictedSOffsetBit, },
{Feature_EnableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasFlatScratchInstsBit, },
{Feature_EnableFlatScratchBit, Feature_HasFlatScratchInstsBit, Feature_HasFlatScratchSVSModeBit, },
{Feature_FP64DenormalsBit, Feature_NotHasMinMaxDenormModesBit, Feature_isGFX12PlusBit, },
{Feature_FP64DenormalsBit, Feature_NotHasMinMaxDenormModesBit, Feature_isNotGFX12PlusBit, },
{Feature_Has16BitInstsBit, Feature_HasMinMaxDenormModesBit, Feature_HasTrue16BitInstsBit, },
{Feature_Has16BitInstsBit, Feature_HasMinMaxDenormModesBit, Feature_NotHasTrue16BitInstsBit, },
{Feature_Has16BitInstsBit, Feature_has32BankLDSBit, Feature_isNotGFX90APlusBit, },
{Feature_HasMadMacF32InstsBit, Feature_NoFP32DenormalsBit, Feature_isGFX6GFX7GFX10Bit, },
{Feature_EnableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasFlatScratchInstsBit, Feature_HasFlatScratchSVSModeBit, },
{Feature_FP16DenormalsBit, Feature_Has16BitInstsBit, Feature_HasTrue16BitInstsBit, Feature_NotHasMinMaxDenormModesBit, },
{Feature_FP16DenormalsBit, Feature_Has16BitInstsBit, Feature_NotHasMinMaxDenormModesBit, Feature_NotHasTrue16BitInstsBit, },
};
// Identifiers for the target's ComplexPattern matchers (TableGen
// GISelComplexPatternEquiv). Each GICP_* enumerator is the index of the
// corresponding entry in AMDGPUInstructionSelector::ComplexPredicateFns
// below (GICP_Invalid maps to the nullptr sentinel), so the order here must
// stay in lock-step with that table. These IDs are presumably also encoded
// by value into the generated match table — do not reorder or renumber.
enum {
GICP_Invalid,
GICP_gi_buf_soffset,
GICP_gi_ds_128bit_8byte_aligned,
GICP_gi_ds_1addr_1offset,
GICP_gi_ds_64bit_4byte_aligned,
GICP_gi_flat_offset,
GICP_gi_flat_scratch_offset,
GICP_gi_flat_scratch_saddr,
GICP_gi_flat_scratch_svaddr,
GICP_gi_global_offset,
GICP_gi_global_saddr,
GICP_gi_mubuf_addr64,
GICP_gi_mubuf_offset,
GICP_gi_mubuf_scratch_offen,
GICP_gi_mubuf_scratch_offset,
GICP_gi_smrd_buffer_imm,
GICP_gi_smrd_buffer_imm32,
GICP_gi_smrd_buffer_sgpr_imm,
GICP_gi_smrd_imm,
GICP_gi_smrd_imm32,
GICP_gi_smrd_sgpr,
GICP_gi_smrd_sgpr_imm,
GICP_gi_swmmacindex16,
GICP_gi_swmmacindex8,
GICP_gi_vcsrc,
GICP_gi_vinterpmods,
GICP_gi_vinterpmods_hi,
GICP_gi_vop3_mad_mix_mods,
GICP_gi_vop3_mad_mix_mods_ext,
GICP_gi_vop3_no_mods,
GICP_gi_vop3mods,
GICP_gi_vop3mods0,
GICP_gi_vop3modsnoncanonicalizing,
GICP_gi_vop3omods,
GICP_gi_vop3opsel,
GICP_gi_vop3opselmods,
GICP_gi_vop3pmods,
GICP_gi_vop3pmodsdot,
GICP_gi_vop3pmodsneg,
GICP_gi_vsrc0,
GICP_gi_wmmamods,
GICP_gi_wmmamodsf16Neg,
GICP_gi_wmmamodsf16NegAbs,
GICP_gi_wmmaopselvop3pmods,
GICP_gi_wmmavisrc,
};
// Table of ComplexPattern renderer member functions, indexed by the GICP_*
// enum above (declared in GET_GLOBALISEL_TEMPORARIES_DECL and passed to
// ExecInfo in GET_GLOBALISEL_TEMPORARIES_INIT). Entry order must mirror the
// enum exactly; the leading nullptr is the GICP_Invalid sentinel.
AMDGPUInstructionSelector::ComplexMatcherMemFn
AMDGPUInstructionSelector::ComplexPredicateFns[] = {
nullptr, // GICP_Invalid
&AMDGPUInstructionSelector::selectBUFSOffset, // GICP_gi_buf_soffset
&AMDGPUInstructionSelector::selectDS128Bit8ByteAligned, // GICP_gi_ds_128bit_8byte_aligned
&AMDGPUInstructionSelector::selectDS1Addr1Offset, // GICP_gi_ds_1addr_1offset
&AMDGPUInstructionSelector::selectDS64Bit4ByteAligned, // GICP_gi_ds_64bit_4byte_aligned
&AMDGPUInstructionSelector::selectFlatOffset, // GICP_gi_flat_offset
&AMDGPUInstructionSelector::selectScratchOffset, // GICP_gi_flat_scratch_offset
&AMDGPUInstructionSelector::selectScratchSAddr, // GICP_gi_flat_scratch_saddr
&AMDGPUInstructionSelector::selectScratchSVAddr, // GICP_gi_flat_scratch_svaddr
&AMDGPUInstructionSelector::selectGlobalOffset, // GICP_gi_global_offset
&AMDGPUInstructionSelector::selectGlobalSAddr, // GICP_gi_global_saddr
&AMDGPUInstructionSelector::selectMUBUFAddr64, // GICP_gi_mubuf_addr64
&AMDGPUInstructionSelector::selectMUBUFOffset, // GICP_gi_mubuf_offset
&AMDGPUInstructionSelector::selectMUBUFScratchOffen, // GICP_gi_mubuf_scratch_offen
&AMDGPUInstructionSelector::selectMUBUFScratchOffset, // GICP_gi_mubuf_scratch_offset
&AMDGPUInstructionSelector::selectSMRDBufferImm, // GICP_gi_smrd_buffer_imm
&AMDGPUInstructionSelector::selectSMRDBufferImm32, // GICP_gi_smrd_buffer_imm32
&AMDGPUInstructionSelector::selectSMRDBufferSgprImm, // GICP_gi_smrd_buffer_sgpr_imm
&AMDGPUInstructionSelector::selectSmrdImm, // GICP_gi_smrd_imm
&AMDGPUInstructionSelector::selectSmrdImm32, // GICP_gi_smrd_imm32
&AMDGPUInstructionSelector::selectSmrdSgpr, // GICP_gi_smrd_sgpr
&AMDGPUInstructionSelector::selectSmrdSgprImm, // GICP_gi_smrd_sgpr_imm
&AMDGPUInstructionSelector::selectSWMMACIndex16, // GICP_gi_swmmacindex16
&AMDGPUInstructionSelector::selectSWMMACIndex8, // GICP_gi_swmmacindex8
&AMDGPUInstructionSelector::selectVCSRC, // GICP_gi_vcsrc
&AMDGPUInstructionSelector::selectVINTERPMods, // GICP_gi_vinterpmods
&AMDGPUInstructionSelector::selectVINTERPModsHi, // GICP_gi_vinterpmods_hi
&AMDGPUInstructionSelector::selectVOP3PMadMixMods, // GICP_gi_vop3_mad_mix_mods
&AMDGPUInstructionSelector::selectVOP3PMadMixModsExt, // GICP_gi_vop3_mad_mix_mods_ext
&AMDGPUInstructionSelector::selectVOP3NoMods, // GICP_gi_vop3_no_mods
&AMDGPUInstructionSelector::selectVOP3Mods, // GICP_gi_vop3mods
&AMDGPUInstructionSelector::selectVOP3Mods0, // GICP_gi_vop3mods0
&AMDGPUInstructionSelector::selectVOP3ModsNonCanonicalizing, // GICP_gi_vop3modsnoncanonicalizing
&AMDGPUInstructionSelector::selectVOP3OMods, // GICP_gi_vop3omods
&AMDGPUInstructionSelector::selectVOP3OpSelMods, // GICP_gi_vop3opsel — NOTE(review): same selector as the next entry; presumably both ComplexPatterns share one matcher in the .td source — confirm if regenerating
&AMDGPUInstructionSelector::selectVOP3OpSelMods, // GICP_gi_vop3opselmods
&AMDGPUInstructionSelector::selectVOP3PMods, // GICP_gi_vop3pmods
&AMDGPUInstructionSelector::selectVOP3PModsDOT, // GICP_gi_vop3pmodsdot
&AMDGPUInstructionSelector::selectVOP3PModsNeg, // GICP_gi_vop3pmodsneg
&AMDGPUInstructionSelector::selectVSRC0, // GICP_gi_vsrc0
&AMDGPUInstructionSelector::selectWMMAModsF32NegAbs, // GICP_gi_wmmamods
&AMDGPUInstructionSelector::selectWMMAModsF16Neg, // GICP_gi_wmmamodsf16Neg
&AMDGPUInstructionSelector::selectWMMAModsF16NegAbs, // GICP_gi_wmmamodsf16NegAbs
&AMDGPUInstructionSelector::selectWMMAOpSelVOP3PMods, // GICP_gi_wmmaopselvop3pmods
&AMDGPUInstructionSelector::selectWMMAVISrc, // GICP_gi_wmmavisrc
};
// Identifiers for the C++ MachineInstr predicates dispatched by
// AMDGPUInstructionSelector::testMIPredicate_MI() below. The first
// enumerator anchors this block at GICXXPred_Invalid + 1; the rest take
// consecutive values. These IDs are presumably encoded by value into the
// generated match table, so the order must not change. The anonymous_NNNNN
// names are TableGen-assigned IDs for unnamed PatFrag predicates in the .td
// sources; the trailing named entries correspond to named PatFrags.
enum {
GICXXPred_MI_Predicate_aligned_smrd_load = GICXXPred_Invalid + 1,
GICXXPred_MI_Predicate_anonymous_18616,
GICXXPred_MI_Predicate_anonymous_18619,
GICXXPred_MI_Predicate_anonymous_18620,
GICXXPred_MI_Predicate_anonymous_18621,
GICXXPred_MI_Predicate_anonymous_18622,
GICXXPred_MI_Predicate_anonymous_18623,
GICXXPred_MI_Predicate_anonymous_18626,
GICXXPred_MI_Predicate_anonymous_18627,
GICXXPred_MI_Predicate_anonymous_18628,
GICXXPred_MI_Predicate_anonymous_18629,
GICXXPred_MI_Predicate_anonymous_18630,
GICXXPred_MI_Predicate_anonymous_18631,
GICXXPred_MI_Predicate_anonymous_18632,
GICXXPred_MI_Predicate_anonymous_18633,
GICXXPred_MI_Predicate_anonymous_18634,
GICXXPred_MI_Predicate_anonymous_18635,
GICXXPred_MI_Predicate_anonymous_18636,
GICXXPred_MI_Predicate_anonymous_18637,
GICXXPred_MI_Predicate_anonymous_18638,
GICXXPred_MI_Predicate_anonymous_18639,
GICXXPred_MI_Predicate_anonymous_18640,
GICXXPred_MI_Predicate_anonymous_18641,
GICXXPred_MI_Predicate_anonymous_18642,
GICXXPred_MI_Predicate_anonymous_18643,
GICXXPred_MI_Predicate_anonymous_18644,
GICXXPred_MI_Predicate_anonymous_18645,
GICXXPred_MI_Predicate_anonymous_18646,
GICXXPred_MI_Predicate_anonymous_18647,
GICXXPred_MI_Predicate_anonymous_18648,
GICXXPred_MI_Predicate_anonymous_18649,
GICXXPred_MI_Predicate_anonymous_18650,
GICXXPred_MI_Predicate_anonymous_18651,
GICXXPred_MI_Predicate_anonymous_18652,
GICXXPred_MI_Predicate_anonymous_18653,
GICXXPred_MI_Predicate_anonymous_18654,
GICXXPred_MI_Predicate_anonymous_18655,
GICXXPred_MI_Predicate_anonymous_18656,
GICXXPred_MI_Predicate_anonymous_18657,
GICXXPred_MI_Predicate_anonymous_18658,
GICXXPred_MI_Predicate_anonymous_18659,
GICXXPred_MI_Predicate_anonymous_18660,
GICXXPred_MI_Predicate_anonymous_18661,
GICXXPred_MI_Predicate_anonymous_18662,
GICXXPred_MI_Predicate_anonymous_18663,
GICXXPred_MI_Predicate_anonymous_18664,
GICXXPred_MI_Predicate_anonymous_18665,
GICXXPred_MI_Predicate_anonymous_18666,
GICXXPred_MI_Predicate_anonymous_18667,
GICXXPred_MI_Predicate_anonymous_18674,
GICXXPred_MI_Predicate_anonymous_18686,
GICXXPred_MI_Predicate_anonymous_22878,
GICXXPred_MI_Predicate_anonymous_23712,
GICXXPred_MI_Predicate_anonymous_23714,
GICXXPred_MI_Predicate_anonymous_23996,
GICXXPred_MI_Predicate_anonymous_23998,
GICXXPred_MI_Predicate_anonymous_24000,
GICXXPred_MI_Predicate_anonymous_24002,
GICXXPred_MI_Predicate_anonymous_24004,
GICXXPred_MI_Predicate_anonymous_24008,
GICXXPred_MI_Predicate_anonymous_24010,
GICXXPred_MI_Predicate_anonymous_24012,
GICXXPred_MI_Predicate_anonymous_24014,
GICXXPred_MI_Predicate_anonymous_24016,
GICXXPred_MI_Predicate_anonymous_24264,
GICXXPred_MI_Predicate_anonymous_24266,
GICXXPred_MI_Predicate_anonymous_24493,
GICXXPred_MI_Predicate_anonymous_24503,
GICXXPred_MI_Predicate_anonymous_24506,
GICXXPred_MI_Predicate_anonymous_24594,
GICXXPred_MI_Predicate_anonymous_24875,
GICXXPred_MI_Predicate_anonymous_24878,
GICXXPred_MI_Predicate_anonymous_24881,
GICXXPred_MI_Predicate_anonymous_24883,
GICXXPred_MI_Predicate_anonymous_24886,
GICXXPred_MI_Predicate_anonymous_24888,
GICXXPred_MI_Predicate_anonymous_24891,
GICXXPred_MI_Predicate_anonymous_24894,
GICXXPred_MI_Predicate_anonymous_24898,
GICXXPred_MI_Predicate_anonymous_24906,
GICXXPred_MI_Predicate_anonymous_24940,
GICXXPred_MI_Predicate_anonymous_25048,
GICXXPred_MI_Predicate_anonymous_25051,
GICXXPred_MI_Predicate_anonymous_25053,
GICXXPred_MI_Predicate_anonymous_25055,
GICXXPred_MI_Predicate_anonymous_25057,
GICXXPred_MI_Predicate_anonymous_25059,
GICXXPred_MI_Predicate_anonymous_25061,
GICXXPred_MI_Predicate_anonymous_25975,
GICXXPred_MI_Predicate_anonymous_25977,
GICXXPred_MI_Predicate_anonymous_25981,
GICXXPred_MI_Predicate_anonymous_25983,
GICXXPred_MI_Predicate_anonymous_25985,
GICXXPred_MI_Predicate_anonymous_25987,
GICXXPred_MI_Predicate_anonymous_25991,
GICXXPred_MI_Predicate_anonymous_25993,
GICXXPred_MI_Predicate_anonymous_25995,
GICXXPred_MI_Predicate_anonymous_25997,
GICXXPred_MI_Predicate_anonymous_25999,
GICXXPred_MI_Predicate_anonymous_26001,
GICXXPred_MI_Predicate_anonymous_26003,
GICXXPred_MI_Predicate_anonymous_26005,
GICXXPred_MI_Predicate_anonymous_26009,
GICXXPred_MI_Predicate_anonymous_26011,
GICXXPred_MI_Predicate_anonymous_26013,
GICXXPred_MI_Predicate_anonymous_26015,
GICXXPred_MI_Predicate_anonymous_26019,
GICXXPred_MI_Predicate_anonymous_26021,
GICXXPred_MI_Predicate_anonymous_26025,
GICXXPred_MI_Predicate_anonymous_26027,
GICXXPred_MI_Predicate_anonymous_26029,
GICXXPred_MI_Predicate_anonymous_26031,
GICXXPred_MI_Predicate_anonymous_26035,
GICXXPred_MI_Predicate_anonymous_26037,
GICXXPred_MI_Predicate_anonymous_26039,
GICXXPred_MI_Predicate_anonymous_26041,
GICXXPred_MI_Predicate_anonymous_26043,
GICXXPred_MI_Predicate_anonymous_26045,
GICXXPred_MI_Predicate_anonymous_26047,
GICXXPred_MI_Predicate_anonymous_26049,
GICXXPred_MI_Predicate_anonymous_26053,
GICXXPred_MI_Predicate_anonymous_26055,
GICXXPred_MI_Predicate_anonymous_26057,
GICXXPred_MI_Predicate_anonymous_26059,
GICXXPred_MI_Predicate_anonymous_26063,
GICXXPred_MI_Predicate_anonymous_26065,
GICXXPred_MI_Predicate_anonymous_26067,
GICXXPred_MI_Predicate_anonymous_26069,
GICXXPred_MI_Predicate_anonymous_26073,
GICXXPred_MI_Predicate_anonymous_26075,
GICXXPred_MI_Predicate_anonymous_26079,
GICXXPred_MI_Predicate_anonymous_26081,
GICXXPred_MI_Predicate_anonymous_26083,
GICXXPred_MI_Predicate_anonymous_26085,
GICXXPred_MI_Predicate_anonymous_26087,
GICXXPred_MI_Predicate_anonymous_26089,
GICXXPred_MI_Predicate_anonymous_26091,
GICXXPred_MI_Predicate_anonymous_26093,
GICXXPred_MI_Predicate_anonymous_26097,
GICXXPred_MI_Predicate_anonymous_26099,
GICXXPred_MI_Predicate_anonymous_26101,
GICXXPred_MI_Predicate_anonymous_26103,
GICXXPred_MI_Predicate_anonymous_26107,
GICXXPred_MI_Predicate_anonymous_26109,
GICXXPred_MI_Predicate_anonymous_26111,
GICXXPred_MI_Predicate_anonymous_26113,
GICXXPred_MI_Predicate_anonymous_26115,
GICXXPred_MI_Predicate_anonymous_26117,
GICXXPred_MI_Predicate_anonymous_26121,
GICXXPred_MI_Predicate_anonymous_26123,
GICXXPred_MI_Predicate_anonymous_26125,
GICXXPred_MI_Predicate_anonymous_26127,
GICXXPred_MI_Predicate_anonymous_26129,
GICXXPred_MI_Predicate_anonymous_26131,
GICXXPred_MI_Predicate_anonymous_26133,
GICXXPred_MI_Predicate_anonymous_26135,
GICXXPred_MI_Predicate_anonymous_26137,
GICXXPred_MI_Predicate_anonymous_26139,
GICXXPred_MI_Predicate_anonymous_26143,
GICXXPred_MI_Predicate_anonymous_26145,
GICXXPred_MI_Predicate_anonymous_26147,
GICXXPred_MI_Predicate_anonymous_26149,
GICXXPred_MI_Predicate_anonymous_26151,
GICXXPred_MI_Predicate_anonymous_26153,
GICXXPred_MI_Predicate_anonymous_26155,
GICXXPred_MI_Predicate_anonymous_26157,
GICXXPred_MI_Predicate_anonymous_36507,
GICXXPred_MI_Predicate_anonymous_36814,
GICXXPred_MI_Predicate_anonymous_36816,
GICXXPred_MI_Predicate_anonymous_36862,
GICXXPred_MI_Predicate_anonymous_36875,
GICXXPred_MI_Predicate_anonymous_36876,
GICXXPred_MI_Predicate_anonymous_36879,
GICXXPred_MI_Predicate_anonymous_36882,
GICXXPred_MI_Predicate_anonymous_36885,
GICXXPred_MI_Predicate_anonymous_36891,
GICXXPred_MI_Predicate_anonymous_36894,
GICXXPred_MI_Predicate_anonymous_36896,
GICXXPred_MI_Predicate_anonymous_36903,
GICXXPred_MI_Predicate_anonymous_36906,
GICXXPred_MI_Predicate_anonymous_36908,
GICXXPred_MI_Predicate_anonymous_36916,
GICXXPred_MI_Predicate_anonymous_36921,
GICXXPred_MI_Predicate_anonymous_36925,
GICXXPred_MI_Predicate_anonymous_36929,
GICXXPred_MI_Predicate_anonymous_37075,
GICXXPred_MI_Predicate_anonymous_37078,
GICXXPred_MI_Predicate_anonymous_37083,
GICXXPred_MI_Predicate_anonymous_37087,
GICXXPred_MI_Predicate_anonymous_37133,
GICXXPred_MI_Predicate_anonymous_37135,
GICXXPred_MI_Predicate_anonymous_37174,
GICXXPred_MI_Predicate_anonymous_37197,
GICXXPred_MI_Predicate_anonymous_37199,
GICXXPred_MI_Predicate_anonymous_37207,
GICXXPred_MI_Predicate_anonymous_37209,
GICXXPred_MI_Predicate_anonymous_37226,
GICXXPred_MI_Predicate_anonymous_37228,
GICXXPred_MI_Predicate_anonymous_37230,
GICXXPred_MI_Predicate_anonymous_37232,
GICXXPred_MI_Predicate_anonymous_37234,
// Named PatFrag predicates (shift-mask checks, NaN checks, canonicalization,
// LDS alignment, and SMRD load/prefetch classification — see the
// corresponding cases in testMIPredicate_MI below).
GICXXPred_MI_Predicate_csh_mask_16,
GICXXPred_MI_Predicate_csh_mask_32,
GICXXPred_MI_Predicate_csh_mask_64,
GICXXPred_MI_Predicate_fmaxnum_like_nnan,
GICXXPred_MI_Predicate_fminnum_like_nnan,
GICXXPred_MI_Predicate_is_canonicalized,
GICXXPred_MI_Predicate_load_align_less_than_4_local,
GICXXPred_MI_Predicate_load_align_less_than_4_local_m0,
GICXXPred_MI_Predicate_shl1_add,
GICXXPred_MI_Predicate_shl2_add,
GICXXPred_MI_Predicate_shl3_add,
GICXXPred_MI_Predicate_shl4_add,
GICXXPred_MI_Predicate_shl_0_to_4,
GICXXPred_MI_Predicate_smrd_extloadi8,
GICXXPred_MI_Predicate_smrd_extloadi16,
GICXXPred_MI_Predicate_smrd_load,
GICXXPred_MI_Predicate_smrd_prefetch,
GICXXPred_MI_Predicate_smrd_sextloadi8,
GICXXPred_MI_Predicate_smrd_sextloadi16,
GICXXPred_MI_Predicate_smrd_zextloadi8,
GICXXPred_MI_Predicate_smrd_zextloadi16,
GICXXPred_MI_Predicate_store_align_less_than_4_local,
GICXXPred_MI_Predicate_store_align_less_than_4_local_m0,
};
bool AMDGPUInstructionSelector::testMIPredicate_MI(unsigned PredicateID, const MachineInstr & MI, const MatcherState &State) const {
const MachineFunction &MF = *MI.getParent()->getParent();
const MachineRegisterInfo &MRI = MF.getRegInfo();
const auto &Operands = State.RecordedOperands;
(void)Operands;
(void)MRI;
switch (PredicateID) {
case GICXXPred_MI_Predicate_aligned_smrd_load: {
auto &Ld = cast<GLoad>(MI);
TypeSize Size = Ld.getMMO().getSize().getValue();
return Size <= 4 || Ld.getMMO().getAlign().value() >= Size;
llvm_unreachable("aligned_smrd_load should have returned");
}
case GICXXPred_MI_Predicate_anonymous_18616: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18619: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18620: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18621: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18622: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18623: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18626: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18627: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18628: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18629: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18630: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18631: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18632: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18633: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18634: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18635: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18636: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18637: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18638: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18639: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18640: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18641: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18642: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18643: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18644: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18645: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18646: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18647: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18648: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18649: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18650: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18651: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18652: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18653: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18654: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18655: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18656: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18657: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18658: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18659: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18660: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18661: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18662: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18663: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18664: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18665: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18666: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18667: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18674: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_18686: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_22878: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_23712: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_23714: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_23996: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_23998: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24000: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24002: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24004: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24008: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24010: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24012: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24014: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24016: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24264: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24266: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24493: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24503: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24506: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24594: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_24875: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24875 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24878: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24878 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24881: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24881 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24883: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24883 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24886: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24886 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24888: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24888 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24891: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24891 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24894: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24894 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24898: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24898 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24906: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24906 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_24940: {
const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
int ConstantBusUses = 0;
for (unsigned i = 0; i < 3; ++i) {
const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
if (++ConstantBusUses > ConstantBusLimit)
return false;
}
}
return true;
llvm_unreachable("anonymous_24940 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_25048: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25051: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25053: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25055: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25057: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25059: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25061: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_25975: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25977: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25981: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25983: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25985: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25987: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25991: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25993: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25995: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25997: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_25999: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26001: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26003: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26005: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26009: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26011: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26013: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26015: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26019: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26021: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26025: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26027: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26029: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26031: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26035: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26037: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26039: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26041: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26043: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26045: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26047: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26049: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26053: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26055: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26057: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26059: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26063: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26065: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26067: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26069: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26073: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26075: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26079: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26081: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26083: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26085: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26087: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26089: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26091: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26093: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26097: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26099: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26101: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26103: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26107: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26109: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26111: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26113: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26115: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26117: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26121: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26123: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26125: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26127: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26129: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26131: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26133: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26135: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26137: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26139: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26143: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26145: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26147: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26149: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26151: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26153: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26155: {
return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_26157: {
return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
}
case GICXXPred_MI_Predicate_anonymous_36507: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36814: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36816: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36862: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36875: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36876: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36879: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36882: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36885: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36891: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36894: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36896: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36903: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36906: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36908: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36916: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36921: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36925: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_36929: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37075: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37078: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37083: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37087: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37133: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37135: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37174: {
const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
MF.getSubtarget().getTargetLowering());
return TLI->isCanonicalized(MI.getOperand(1).getReg(), MF) &&
TLI->isCanonicalized(MI.getOperand(2).getReg(), MF);
llvm_unreachable("anonymous_37174 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_37197: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37199: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37207: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37209: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37226: {
const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
MF.getSubtarget().getTargetLowering());
return TLI->isCanonicalized(MI.getOperand(1).getReg(), MF);
llvm_unreachable("anonymous_37226 should have returned");
}
case GICXXPred_MI_Predicate_anonymous_37228: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37230: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37232: {
return true;
}
case GICXXPred_MI_Predicate_anonymous_37234: {
return true;
}
case GICXXPred_MI_Predicate_csh_mask_16: {
return isUnneededShiftMask(MI, 4);
}
case GICXXPred_MI_Predicate_csh_mask_32: {
return isUnneededShiftMask(MI, 5);
}
case GICXXPred_MI_Predicate_csh_mask_64: {
return isUnneededShiftMask(MI, 6);
}
case GICXXPred_MI_Predicate_fmaxnum_like_nnan: {
return isKnownNeverNaN(MI.getOperand(0).getReg(), MRI);
}
case GICXXPred_MI_Predicate_fminnum_like_nnan: {
return isKnownNeverNaN(MI.getOperand(0).getReg(), MRI);
}
case GICXXPred_MI_Predicate_is_canonicalized: {
const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
MF.getSubtarget().getTargetLowering());
const MachineOperand &Dst = MI.getOperand(0);
assert(Dst.isDef());
return TLI->isCanonicalized(Dst.getReg(), MF);
llvm_unreachable("is_canonicalized should have returned");
}
case GICXXPred_MI_Predicate_load_align_less_than_4_local: {
return (*MI.memoperands_begin())->getAlign() < 4;
}
case GICXXPred_MI_Predicate_load_align_less_than_4_local_m0: {
return (*MI.memoperands_begin())->getAlign() < 4;
}
case GICXXPred_MI_Predicate_shl1_add: {
return true;
}
case GICXXPred_MI_Predicate_shl2_add: {
return true;
}
case GICXXPred_MI_Predicate_shl3_add: {
return true;
}
case GICXXPred_MI_Predicate_shl4_add: {
return true;
}
case GICXXPred_MI_Predicate_shl_0_to_4: {
int64_t Imm = 0;
if (!mi_match(MI.getOperand(2).getReg(), MRI, m_ICst(Imm)) &&
!mi_match(MI.getOperand(2).getReg(), MRI, m_Copy(m_ICst(Imm))))
return false;
return (uint64_t)Imm <= 4;
llvm_unreachable("shl_0_to_4 should have returned");
}
case GICXXPred_MI_Predicate_smrd_extloadi8: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_extloadi8 should have returned");
}
case GICXXPred_MI_Predicate_smrd_extloadi16: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_extloadi16 should have returned");
}
case GICXXPred_MI_Predicate_smrd_load: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_load should have returned");
}
case GICXXPred_MI_Predicate_smrd_prefetch: {
return isInstrUniform(MI);
}
case GICXXPred_MI_Predicate_smrd_sextloadi8: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_sextloadi8 should have returned");
}
case GICXXPred_MI_Predicate_smrd_sextloadi16: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_sextloadi16 should have returned");
}
case GICXXPred_MI_Predicate_smrd_zextloadi8: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_zextloadi8 should have returned");
}
case GICXXPred_MI_Predicate_smrd_zextloadi16: {
if (!MI.hasOneMemOperand())
return false;
if (!isInstrUniform(MI))
return false;
SmallVector<GEPInfo, 4> AddrInfo;
getAddrModeInfo(MI, MRI, AddrInfo);
if (hasVgprParts(AddrInfo))
return false;
return true;
llvm_unreachable("smrd_zextloadi16 should have returned");
}
case GICXXPred_MI_Predicate_store_align_less_than_4_local: {
return (*MI.memoperands_begin())->getAlign() < 4;
}
case GICXXPred_MI_Predicate_store_align_less_than_4_local_m0: {
return (*MI.memoperands_begin())->getAlign() < 4;
}
}
llvm_unreachable("Unknown predicate");
return false;
}
// Predicate IDs for the int64_t immediate predicates dispatched by
// testImmPredicate_I64 below.
// NOTE: TableGen-generated. Enumerator order defines the numeric IDs that
// the encoded match table references; do not reorder or insert by hand.
enum {
GICXXPred_I64_Predicate_IMMZeroBasedBitfieldMask = GICXXPred_Invalid + 1,
GICXXPred_I64_Predicate_NegSubInlineConst32,
GICXXPred_I64_Predicate_NegSubInlineIntConst16,
GICXXPred_I64_Predicate_SIMM16bit,
GICXXPred_I64_Predicate_ShiftAmt32Imm,
GICXXPred_I64_Predicate_SupportedRoundMode,
GICXXPred_I64_Predicate_i32imm_one,
GICXXPred_I64_Predicate_i32imm_zero,
GICXXPred_I64_Predicate_i64imm_32bit,
};
/// Evaluate the int64_t immediate predicate identified by \p PredicateID
/// against \p Imm. Reaching the end of the switch means the match table
/// handed us an ID we never emitted, which is a generator/table mismatch.
bool AMDGPUInstructionSelector::testImmPredicate_I64(unsigned PredicateID,
                                                     int64_t Imm) const {
  switch (PredicateID) {
  case GICXXPred_I64_Predicate_IMMZeroBasedBitfieldMask:
    // A contiguous run of low set bits (0b0...01...1).
    return isMask_32(Imm);
  case GICXXPred_I64_Predicate_NegSubInlineConst32:
  case GICXXPred_I64_Predicate_NegSubInlineIntConst16:
    // Both predicates accept the same window: [-64, -17].
    return Imm < -16 && Imm >= -64;
  case GICXXPred_I64_Predicate_SIMM16bit:
    // Fits in 16 bits under either a signed or unsigned reading.
    return isInt<16>(Imm) || isUInt<16>(Imm);
  case GICXXPred_I64_Predicate_ShiftAmt32Imm:
    return Imm < 32;
  case GICXXPred_I64_Predicate_SupportedRoundMode:
    return Imm == (int)RoundingMode::TowardZero ||
           Imm == (int)RoundingMode::NearestTiesToEven ||
           Imm == (int)RoundingMode::TowardPositive ||
           Imm == (int)RoundingMode::TowardNegative;
  case GICXXPred_I64_Predicate_i32imm_one:
    return Imm == 1;
  case GICXXPred_I64_Predicate_i32imm_zero:
    return Imm == 0;
  case GICXXPred_I64_Predicate_i64imm_32bit:
    // Value's high 32 bits are zero, i.e. representable as unsigned 32-bit.
    return (Imm & 0xffffffffULL) == static_cast<uint64_t>(Imm);
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
// Predicate IDs for the APFloat immediate predicates dispatched by
// testImmPredicate_APFloat below.
// NOTE: TableGen-generated. Enumerator order defines the numeric IDs that
// the encoded match table references; do not reorder or insert by hand.
enum {
GICXXPred_APFloat_Predicate_InlineImmFP32 = GICXXPred_Invalid + 1,
GICXXPred_APFloat_Predicate_InlineImmFP64,
GICXXPred_APFloat_Predicate_fpimm_neg_pow2_prefer_ldexp_f64,
GICXXPred_APFloat_Predicate_fpimm_pos_pow2_prefer_ldexp_f64,
};
/// Evaluate the floating-point immediate predicate identified by
/// \p PredicateID against \p Imm. Unknown IDs indicate a corrupt or
/// mismatched match table and abort.
bool AMDGPUInstructionSelector::testImmPredicate_APFloat(
    unsigned PredicateID, const APFloat &Imm) const {
  switch (PredicateID) {
  case GICXXPred_APFloat_Predicate_InlineImmFP32:
  case GICXXPred_APFloat_Predicate_InlineImmFP64:
    // Both FP32 and FP64 variants defer to the same helper.
    return isInlineImmediate(Imm);
  case GICXXPred_APFloat_Predicate_fpimm_neg_pow2_prefer_ldexp_f64:
  case GICXXPred_APFloat_Predicate_fpimm_pos_pow2_prefer_ldexp_f64: {
    // Identical power-of-two test; the two IDs differ only in the required
    // sign of the immediate.
    const bool WantNegative =
        PredicateID == GICXXPred_APFloat_Predicate_fpimm_neg_pow2_prefer_ldexp_f64;
    if (Imm.isNegative() != WantNegative)
      return false;
    // Exact power of two whose exponent lies outside [-1, 2]; INT_MIN from
    // getExactLog2Abs() means "not an exact power of two".
    int Exp = Imm.getExactLog2Abs();
    return Exp != INT_MIN && (Exp < -1 || Exp > 2);
  }
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
// Predicate ID for the sole APInt immediate predicate dispatched by
// testImmPredicate_APInt below.
// NOTE: TableGen-generated; the numeric value is referenced by the encoded
// match table.
enum {
GICXXPred_APInt_Predicate_InlineImm64 = GICXXPred_Invalid + 1,
};
/// Evaluate the APInt immediate predicate identified by \p PredicateID
/// against \p Imm. Only one APInt predicate exists for this target; any
/// other ID is a match-table mismatch and aborts.
bool AMDGPUInstructionSelector::testImmPredicate_APInt(unsigned PredicateID,
                                                       const APInt &Imm) const {
  if (PredicateID == GICXXPred_APInt_Predicate_InlineImm64)
    return isInlineImmediate(Imm);
  llvm_unreachable("Unknown predicate");
  return false;
}
// This target defines no GlobalISel "simple predicates"; reaching this
// override indicates a corrupt or mismatched match table. The trailing
// return keeps compilers happy in builds where llvm_unreachable lowers to
// a plain trap/no-op.
bool AMDGPUInstructionSelector::testSimplePredicate(unsigned) const {
llvm_unreachable("AMDGPUInstructionSelector does not support simple predicates!");
return false;
}
// Custom-renderer IDs used by the match table's renderer opcodes.
// NOTE: TableGen-generated. Each enumerator is an index into the
// CustomRenderers array defined immediately below; the two must stay in
// lockstep, so do not reorder or insert by hand.
enum {
GICR_Invalid,
GICR_renderBitcastFPImm32,
GICR_renderBitcastFPImm64,
GICR_renderExtractCPol,
GICR_renderExtractCpolSetGLC,
GICR_renderExtractSWZ,
GICR_renderFPPow2ToExponent,
GICR_renderFrameIndex,
GICR_renderNegateImm,
GICR_renderOpSelTImm,
GICR_renderPopcntImm,
GICR_renderRoundMode,
GICR_renderTruncTImm,
};
// Table of custom operand-renderer member functions, indexed by the GICR_*
// enum above (slot 0 is GICR_Invalid, hence nullptr).
// NOTE: TableGen-generated; entry order must exactly mirror the enum.
AMDGPUInstructionSelector::CustomRendererFn
AMDGPUInstructionSelector::CustomRenderers[] = {
nullptr,
&AMDGPUInstructionSelector::renderBitcastFPImm32,
&AMDGPUInstructionSelector::renderBitcastFPImm64,
&AMDGPUInstructionSelector::renderExtractCPol,
&AMDGPUInstructionSelector::renderExtractCpolSetGLC,
&AMDGPUInstructionSelector::renderExtractSWZ,
&AMDGPUInstructionSelector::renderFPPow2ToExponent,
&AMDGPUInstructionSelector::renderFrameIndex,
&AMDGPUInstructionSelector::renderNegateImm,
&AMDGPUInstructionSelector::renderOpSelTImm,
&AMDGPUInstructionSelector::renderPopcntImm,
&AMDGPUInstructionSelector::renderRoundMode,
&AMDGPUInstructionSelector::renderTruncTImm,
};
/// Attempt to select \p I by interpreting the generated match table.
/// Returns true on a successful selection (the table's actions will have
/// mutated/replaced the instruction via the builder), false otherwise.
bool AMDGPUInstructionSelector::selectImpl(MachineInstr &I,
                                           CodeGenCoverage &CoverageInfo) const {
  const PredicateBitset AvailableFeatures = getAvailableFeatures();
  MachineIRBuilder Builder(I);
  // Seed the matcher state: MIs[0] is always the root instruction.
  State.MIs.clear();
  State.MIs.push_back(&I);
  return executeMatchTable(*this, State, ExecInfo, Builder, getMatchTable(),
                           TII, MF->getRegInfo(), TRI, RBI, AvailableFeatures,
                           &CoverageInfo);
}
// This target emits no custom C++ match-table actions; reaching this
// override indicates a corrupt or mismatched match table.
bool AMDGPUInstructionSelector::runCustomAction(unsigned, const MatcherState&, NewMIVector &) const {
llvm_unreachable("AMDGPUInstructionSelector does not support custom C++ actions!");
}
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define GIMT_Encode2 …
#define GIMT_Encode4 …
#define GIMT_Encode8 …
#else
#define GIMT_Encode2 …
#define GIMT_Encode4 …
#define GIMT_Encode8 …
#endif
const uint8_t *AMDGPUInstructionSelector::getMatchTable() const {
constexpr static uint8_t MatchTable0[] = {
GIM_SwitchOpcode, 0, GIMT_Encode2(53), GIMT_Encode2(3660), GIMT_Encode4(587105),
GIMT_Encode4(14438),
GIMT_Encode4(18072),
GIMT_Encode4(18573), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(18859),
GIMT_Encode4(19871),
GIMT_Encode4(34395), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(38819), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(38871), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(41906), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(50379),
GIMT_Encode4(50471), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(50799),
GIMT_Encode4(51127),
GIMT_Encode4(51267),
GIMT_Encode4(51325),
GIMT_Encode4(126714),
GIMT_Encode4(130680), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(134522), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(170428),
GIMT_Encode4(171554),
GIMT_Encode4(173766),
GIMT_Encode4(176278),
GIMT_Encode4(178790), GIMT_Encode4(0),
GIMT_Encode4(181302),
GIMT_Encode4(183814),
GIMT_Encode4(186326),
GIMT_Encode4(188838),
GIMT_Encode4(191350),
GIMT_Encode4(193862),
GIMT_Encode4(196374), GIMT_Encode4(0),
GIMT_Encode4(199233),
GIMT_Encode4(201879),
GIMT_Encode4(204525),
GIMT_Encode4(207037), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(209549),
GIMT_Encode4(209568), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(210004),
GIMT_Encode4(223248),
GIMT_Encode4(229370),
GIMT_Encode4(246429),
GIMT_Encode4(247668),
GIMT_Encode4(248231),
GIMT_Encode4(248356),
GIMT_Encode4(249082), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(249460), GIMT_Encode4(0),
GIMT_Encode4(250251),
GIMT_Encode4(251839),
GIMT_Encode4(253029),
GIMT_Encode4(253981), GIMT_Encode4(0),
GIMT_Encode4(254933),
GIMT_Encode4(254978), GIMT_Encode4(0),
GIMT_Encode4(255010),
GIMT_Encode4(256272), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(260393), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(260597),
GIMT_Encode4(260706),
GIMT_Encode4(260815),
GIMT_Encode4(261081),
GIMT_Encode4(261283),
GIMT_Encode4(261549), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(261751),
GIMT_Encode4(263109),
GIMT_Encode4(263473),
GIMT_Encode4(264859),
GIMT_Encode4(266500), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(267446), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(267564), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(267787), GIMT_Encode4(0),
GIMT_Encode4(268010), GIMT_Encode4(0),
GIMT_Encode4(268373),
GIMT_Encode4(270706),
GIMT_Encode4(271051),
GIMT_Encode4(271734),
GIMT_Encode4(272298),
GIMT_Encode4(272862),
GIMT_Encode4(273409), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(273956),
GIMT_Encode4(274958),
GIMT_Encode4(275515),
GIMT_Encode4(275784),
GIMT_Encode4(276984),
GIMT_Encode4(313323),
GIMT_Encode4(349662),
GIMT_Encode4(386001),
GIMT_Encode4(422340),
GIMT_Encode4(423810), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(425280), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(425404), GIMT_Encode4(0),
GIMT_Encode4(425481),
GIMT_Encode4(428585),
GIMT_Encode4(431851),
GIMT_Encode4(434955),
GIMT_Encode4(438059), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(438196), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(438230), GIMT_Encode4(0),
GIMT_Encode4(438329),
GIMT_Encode4(438431),
GIMT_Encode4(438643),
GIMT_Encode4(439590),
GIMT_Encode4(439838), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(440166),
GIMT_Encode4(440458), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(440972),
GIMT_Encode4(441934),
GIMT_Encode4(442298), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(443566), GIMT_Encode4(0),
GIMT_Encode4(444454), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(444817),
GIMT_Encode4(447097),
GIMT_Encode4(449481),
GIMT_Encode4(451865),
GIMT_Encode4(456159),
GIMT_Encode4(456776),
GIMT_Encode4(459160),
GIMT_Encode4(463986),
GIMT_Encode4(467602),
GIMT_Encode4(471218),
GIMT_Encode4(473602),
GIMT_Encode4(475986),
GIMT_Encode4(478370),
GIMT_Encode4(480754),
GIMT_Encode4(483138),
GIMT_Encode4(486682),
GIMT_Encode4(489066),
GIMT_Encode4(491450),
GIMT_Encode4(493834),
GIMT_Encode4(509756),
GIMT_Encode4(514664),
GIMT_Encode4(522409),
GIMT_Encode4(524929),
GIMT_Encode4(525535),
GIMT_Encode4(526141),
GIMT_Encode4(526747),
GIMT_Encode4(527353),
GIMT_Encode4(529873),
GIMT_Encode4(530479),
GIMT_Encode4(531085),
GIMT_Encode4(531691),
GIMT_Encode4(532297),
GIMT_Encode4(548163),
GIMT_Encode4(548765),
GIMT_Encode4(554253),
GIMT_Encode4(560518),
GIMT_Encode4(561120),
GIMT_Encode4(562059),
GIMT_Encode4(562104),
GIMT_Encode4(562149),
GIMT_Encode4(562194),
GIMT_Encode4(562239),
GIMT_Encode4(562269),
GIMT_Encode4(562371),
GIMT_Encode4(562470),
GIMT_Encode4(562543),
GIMT_Encode4(562739), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(562812),
GIMT_Encode4(562862),
GIMT_Encode4(563010),
GIMT_Encode4(565312),
GIMT_Encode4(565450),
GIMT_Encode4(565588),
GIMT_Encode4(565726),
GIMT_Encode4(565864), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(565971),
GIMT_Encode4(571455),
GIMT_Encode4(576884),
GIMT_Encode4(582352),
GIMT_Encode4(586957),
GIM_SwitchType, 0, 0, GIMT_Encode2(7), GIMT_Encode2(12), GIMT_Encode4(18071),
GIMT_Encode4(14469),
GIMT_Encode4(14588),
GIMT_Encode4(14861),
GIMT_Encode4(17594),
GIMT_Encode4(17801),
GIM_Try, GIMT_Encode4(14587),
GIM_RootCheckType, 1, GILLT_s1,
GIM_RootCheckType, 2, GILLT_s1,
GIM_Try, GIMT_Encode4(14508),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOT_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14536),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOT_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14561),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(14586),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(14860),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_Try, GIMT_Encode4(14642),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX8GFX9),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckI64ImmPredicate, 1, GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineIntConst16),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_U16_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_CustomRenderer, 0, 1, GIMT_Encode2(GICR_renderNegateImm),
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14690),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckI64ImmPredicate, 1, GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineIntConst16),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_NC_U16_e64),
GIR_RootToRootCopy, 0,
GIR_AddImm8, 0, 0,
GIR_RootToRootCopy, 1,
GIR_AddImm8, 0, 0,
GIR_CustomRenderer, 0, 1, GIMT_Encode2(GICR_renderNegateImm),
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14736),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_MAD_U16_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14782),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_MAD_U16_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14837),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD_NC_U16_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(14859),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX8GFX9),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD_U16_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(17593),
GIM_RootCheckType, 1, GILLT_s32,
GIM_RootCheckType, 2, GILLT_s32,
GIM_Try, GIMT_Encode4(14965),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordNamedOperand, 2, 1, 1,
GIM_RecordInsn, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckCxxInsnPredicate, 2, GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
GIM_CheckIsSafeToFold, 3,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 2, 1,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15058),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordNamedOperand, 2, 1, 1,
GIM_RecordInsn, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckCxxInsnPredicate, 2, GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
GIM_CheckIsSafeToFold, 3,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 2, 1,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15123),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 1,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl1_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL1_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15188),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 2,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl2_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL2_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15253),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 3,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl3_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL3_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15318),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 4,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl4_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL4_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15383),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl1_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL1_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15448),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl2_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL2_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15513),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 3,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl3_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL3_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15578),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckRegBankForClass, 1, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 1, 2, 4,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_shl4_add),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_LSHL4_ADD_U32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15743),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 1,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15908),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 1,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(15973),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_ADD),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24881),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD3_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16138),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 2,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16303),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 2,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16464),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 2,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16625),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 2,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16690),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16755),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24894),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XAD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16820),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_ADD),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24881),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD3_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(16981),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 1,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17142),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 2, GILLT_s64,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
GIR_AddTempRegister, 3, 3, GIMT_Encode2(RegState::Define),
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_AddTempRegister, 2, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 0, 1,
GIR_AddImm8, 2, 3,
GIR_AddSimpleTempRegister, 2, 3,
GIR_AddImm8, 2, 11,
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
GIR_ConstrainOperandRC, 2, 3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
GIR_MakeTempReg, 1, GILLT_s1,
GIR_MakeTempReg, 0, GILLT_s64,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddTempRegister, 1, 1, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_Copy, 1, 1, 1,
GIR_Copy, 1, 1, 2,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddImm8, 1, 0,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::COPY),
GIR_RootToRootCopy, 0,
GIR_AddTempSubRegister, 0, 0, GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17207),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17272),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24894),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XAD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17316),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckI64ImmPredicate, 1, GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_SUB_I32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_CustomRenderer, 0, 1, GIMT_Encode2(GICR_renderNegateImm),
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17363),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckI64ImmPredicate, 1, GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_U32_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_CustomRenderer, 0, 1, GIMT_Encode2(GICR_renderNegateImm),
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17418),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_NotHasAddNoCarryInsts),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckI64ImmPredicate, 1, GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
GIM_CheckIsSafeToFold, 1,
GIR_MakeTempReg, 0, GILLT_s1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_CO_U32_e64),
GIR_RootToRootCopy, 0,
GIR_AddTempRegister, 0, 0, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_RootToRootCopy, 1,
GIR_CustomRenderer, 0, 1, GIMT_Encode2(GICR_renderNegateImm),
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17452),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_ADD_I32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(17482),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD_U32_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17520),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX6GFX7GFX8GFX9),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
GIR_MakeTempReg, 0, GILLT_s1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_ADD_CO_U32_e64),
GIR_RootToRootCopy, 0,
GIR_AddTempRegister, 0, 0, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17556),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CTPOP),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BCNT_U32_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17592),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_CTPOP),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BCNT_U32_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(17800),
GIM_RootCheckType, 1, GILLT_s64,
GIM_RootCheckType, 2, GILLT_s64,
GIM_Try, GIMT_Encode4(17674),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX940Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 1, GIMT_Encode2(GICXXPred_MI_Predicate_shl_0_to_4),
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24898),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U64_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17743),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX940Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 1, GIMT_Encode2(GICXXPred_MI_Predicate_shl_0_to_4),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24898),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_ADD_U64_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(17773),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_ADD_U64_PSEUDO),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::VCC), GIMT_Encode2(RegState::Dead),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(17799),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_ADD_U64_PSEUDO),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18070),
GIM_RootCheckType, 1, GILLT_v2s16,
GIM_RootCheckType, 2, GILLT_v2s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_Try, GIMT_Encode4(17912),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_CheckIsSafeToFold, 1,
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 1, 1, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 1, 2, GIMT_Encode2(2), GIMT_Encode2(GICP_gi_vop3pmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_PK_MAD_U16),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(2), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(2), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18008),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_MUL),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_CheckIsSafeToFold, 1,
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(2), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 1, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 1, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_PK_MAD_U16),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(2), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(2), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18069),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_PK_ADD_U16),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Reject,
GIM_SwitchType, 0, 0, GIMT_Encode2(7), GIMT_Encode2(12), GIMT_Encode4(18572),
GIMT_Encode4(18103),
GIMT_Encode4(18222),
GIMT_Encode4(18316),
GIMT_Encode4(18431),
GIMT_Encode4(18500),
GIM_Try, GIMT_Encode4(18221),
GIM_RootCheckType, 1, GILLT_s1,
GIM_RootCheckType, 2, GILLT_s1,
GIM_Try, GIMT_Encode4(18142),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOT_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18170),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOT_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18195),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18220),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18315),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_Try, GIMT_Encode4(18292),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_NC_U16_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18314),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX8GFX9),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_U16_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18430),
GIM_RootCheckType, 1, GILLT_s32,
GIM_RootCheckType, 2, GILLT_s32,
GIM_Try, GIMT_Encode4(18361),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 1, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RootCheckRegBankForClass, 2, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18638),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_SUB_I32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18391),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_U32_e64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18429),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX6GFX7GFX8GFX9),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
GIR_MakeTempReg, 0, GILLT_s1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_SUB_CO_U32_e64),
GIR_RootToRootCopy, 0,
GIR_AddTempRegister, 0, 0, GIMT_Encode2(RegState::Define|RegState::Dead),
GIR_RootToRootCopy, 1,
GIR_RootToRootCopy, 2,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18499),
GIM_RootCheckType, 1, GILLT_s64,
GIM_RootCheckType, 2, GILLT_s64,
GIM_Try, GIMT_Encode4(18472),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_SUB_U64_PSEUDO),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::VCC), GIMT_Encode2(RegState::Dead),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18498),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18638),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_SUB_U64_PSEUDO),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18571),
GIM_RootCheckType, 1, GILLT_v2s16,
GIM_RootCheckType, 2, GILLT_v2s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_PK_SUB_I16),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_SwitchType, 0, 0, GIMT_Encode2(8), GIMT_Encode2(12), GIMT_Encode4(18858),
GIMT_Encode4(18600),
GIMT_Encode4(18655),
GIMT_Encode4(18756),
GIMT_Encode4(18786),
GIM_Try, GIMT_Encode4(18654),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_Try, GIMT_Encode4(18634),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_NotHasTrue16BitInsts),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_MUL_LO_U16_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18653),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasTrue16BitInsts),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_MUL_LO_U16_t16_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18755),
GIM_RootCheckType, 1, GILLT_s32,
GIM_RootCheckType, 2, GILLT_s32,
GIM_Try, GIMT_Encode4(18686),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18645),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_MUL_I32),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18720),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24493),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3mods0),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_MUL_LO_U32_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(18754),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24493),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3mods0),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_MUL_LO_U32_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(18785),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX12Plus),
GIM_RootCheckType, 1, GILLT_s64,
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18645),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_MUL_U64),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Try, GIMT_Encode4(18857),
GIM_RootCheckType, 1, GILLT_v2s16,
GIM_RootCheckType, 2, GILLT_v2s16,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_PK_MUL_LO_U16),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_SwitchType, 0, 0, GIMT_Encode2(7), GIMT_Encode2(17), GIMT_Encode4(19870),
GIMT_Encode4(18910),
GIMT_Encode4(18973),
GIMT_Encode4(19187),
GIMT_Encode4(19348),
GIMT_Encode4(19473),
GIMT_Encode4(19624), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(19747),
GIM_Try, GIMT_Encode4(18972),
GIM_RootCheckType, 1, GILLT_s1,
GIM_RootCheckType, 2, GILLT_s1,
GIM_Try, GIMT_Encode4(18946),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_AND_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(18971),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_AND_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19186),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_Try, GIMT_Encode4(19035),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19086),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19106),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_AND_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(19162),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_AND_B16_t16_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19185),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_AND_B16_fake16_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19347),
GIM_RootCheckType, 1, GILLT_s32,
GIM_RootCheckType, 2, GILLT_s32,
GIM_Try, GIMT_Encode4(19249),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19300),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19326),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_AND_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(19346),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_AND_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19472),
GIM_RootCheckType, 1, GILLT_s64,
GIM_RootCheckType, 2, GILLT_s64,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_Try, GIMT_Encode4(19410),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19453),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19471),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_AND_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19623),
GIM_RootCheckType, 1, GILLT_v2s16,
GIM_RootCheckType, 2, GILLT_v2s16,
GIM_Try, GIMT_Encode4(19543),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19602),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19622),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_AND_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19746),
GIM_RootCheckType, 1, GILLT_v2s32,
GIM_RootCheckType, 2, GILLT_v2s32,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_Try, GIMT_Encode4(19694),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s32,
GIM_CheckType, 1, 2, GILLT_v2s32,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19745),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s32,
GIM_CheckType, 1, 2, GILLT_v2s32,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(19869),
GIM_RootCheckType, 1, GILLT_v4s16,
GIM_RootCheckType, 2, GILLT_v4s16,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_Try, GIMT_Encode4(19817),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v4s16,
GIM_CheckType, 1, 2, GILLT_v4s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(19868),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v4s16,
GIM_CheckType, 1, 2, GILLT_v4s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ANDN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Reject,
GIM_SwitchType, 0, 0, GIMT_Encode2(7), GIMT_Encode2(17), GIMT_Encode4(34394),
GIMT_Encode4(19922),
GIMT_Encode4(19985),
GIMT_Encode4(20199),
GIMT_Encode4(24808),
GIMT_Encode4(33997),
GIMT_Encode4(34148), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
GIMT_Encode4(34271),
GIM_Try, GIMT_Encode4(19984),
GIM_RootCheckType, 1, GILLT_s1,
GIM_RootCheckType, 2, GILLT_s1,
GIM_Try, GIMT_Encode4(19958),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_OR_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(19983),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_OR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(20198),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_Try, GIMT_Encode4(20047),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20098),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s16,
GIM_CheckType, 1, 2, GILLT_s16,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20118),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_OR_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(20174),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_OR_B16_t16_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20197),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_OR_B16_fake16_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(24807),
GIM_RootCheckType, 1, GILLT_s32,
GIM_RootCheckType, 2, GILLT_s32,
GIM_Try, GIMT_Encode4(20348),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 3, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 2, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20486),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 1, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 3, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 2, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20624),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 3, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 2, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20762),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 1, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 3, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 2, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(20900),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21038),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21176),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21314),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21407),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordNamedOperand, 2, 1, 1,
GIM_RecordInsn, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckCxxInsnPredicate, 2, GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
GIM_CheckIsSafeToFold, 3,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 2, 1,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21500),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordNamedOperand, 2, 1, 1,
GIM_RecordInsn, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
GIM_CheckCxxInsnPredicate, 2, GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
GIM_CheckIsSafeToFold, 3,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 2, 1,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21673),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(21846),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22019),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22192),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22365),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22538),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22711),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(22884),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 1, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 2, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 1, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23057),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23230),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23403),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23576),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 2,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23749),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(23922),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 2,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24095),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24268),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_MakeTempReg, 3, GILLT_s16,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_Copy, 5, 2, 1,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_Copy, 4, 1, 1,
GIR_ConstrainSelectedInstOperands, 4,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 1, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24319),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24370),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24435),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24888),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_AND_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24500),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24891),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_OR3_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24565),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24630),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24888),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_AND_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24695),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24891),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_OR3_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24760),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_SHL),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(24786),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_OR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(24806),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_OR_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(33996),
GIM_RootCheckType, 1, GILLT_s64,
GIM_RootCheckType, 2, GILLT_s64,
GIM_Try, GIMT_Encode4(25134),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 3, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 3, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(25449),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 1, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 3, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 3, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(25764),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 3, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 3, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(26079),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckConstantInt8, 2, 2, uint8_t(-1),
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 3, 1, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 3, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 3, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(26394),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(26709),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(27024),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(27339),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 3, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckConstantInt8, 3, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(27747),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(28155),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(28563),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(28971),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(29379),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(29787),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(30195),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(30603),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 0, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 2, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(31011),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(31419),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(31827),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(32235),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 1,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(32643),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33051),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33459),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 2,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33867),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 0, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 3, 2, 2,
GIM_CheckOpcode, 3, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckIsSameOperandIgnoreCopies, 3, 1, 1, 1,
GIM_CheckIsSameOperandIgnoreCopies, 3, 2, 1, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
GIM_CheckIsSafeToFold, 3,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s32,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s16,
GIR_MakeTempReg, 7, GILLT_s16,
GIR_MakeTempReg, 8, GILLT_s32,
GIR_MakeTempReg, 9, GILLT_s32,
GIR_MakeTempReg, 10, GILLT_s32,
GIR_MakeTempReg, 11, GILLT_s32,
GIR_BuildMI, 12, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 12, 11, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 12, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 12, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 12, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 11, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 11, 10, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 11, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 11, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 11, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 10, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 10, 9, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 10, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 10, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 10, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 9, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 9, 8, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 9, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 9, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 9, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 8, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 8, 8,
GIR_AddSimpleTempRegister, 8, 9,
GIR_ConstrainSelectedInstOperands, 8,
GIR_BuildMI, 7, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 7, 7,
GIR_AddSimpleTempRegister, 7, 10,
GIR_AddSimpleTempRegister, 7, 11,
GIR_ConstrainSelectedInstOperands, 7,
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 5, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 5, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 5, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 2, 2,
GIR_AddSimpleTempRegister, 2, 3,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 4,
GIR_AddSimpleTempRegister, 1, 5,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 6,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33918),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33969),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(33995),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_OR_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(34147),
GIM_RootCheckType, 1, GILLT_v2s16,
GIM_RootCheckType, 2, GILLT_v2s16,
GIM_Try, GIMT_Encode4(34067),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34126),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s16,
GIM_CheckType, 1, 2, GILLT_v2s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34146),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_OR_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(34270),
GIM_RootCheckType, 1, GILLT_v2s32,
GIM_RootCheckType, 2, GILLT_v2s32,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_Try, GIMT_Encode4(34218),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s32,
GIM_CheckType, 1, 2, GILLT_v2s32,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34269),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v2s32,
GIM_CheckType, 1, 2, GILLT_v2s32,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(34393),
GIM_RootCheckType, 1, GILLT_v4s16,
GIM_RootCheckType, 2, GILLT_v4s16,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_Try, GIMT_Encode4(34341),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v4s16,
GIM_CheckType, 1, 2, GILLT_v4s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 2,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34392),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_v4s16,
GIM_CheckType, 1, 2, GILLT_v4s16,
GIM_RecordInsn, 2, 1, 2,
GIM_CheckOpcodeIsEither, 2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
GIM_CheckIsBuildVectorAllOnes, 2,
GIM_CheckIsSafeToFold, 2,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_ORN2_B64),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_Copy, 0, 1, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Reject,
GIM_Reject,
GIM_Reject,
GIM_SwitchType, 0, 0, GIMT_Encode2(7), GIMT_Encode2(12), GIMT_Encode4(38818),
GIMT_Encode4(34426),
GIMT_Encode4(34489),
GIMT_Encode4(34601),
GIMT_Encode4(36158),
GIMT_Encode4(38791),
GIM_Try, GIMT_Encode4(34488),
GIM_RootCheckType, 1, GILLT_s1,
GIM_RootCheckType, 2, GILLT_s1,
GIM_Try, GIMT_Encode4(34462),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B64),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(34487),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(34600),
GIM_RootCheckType, 1, GILLT_s16,
GIM_RootCheckType, 2, GILLT_s16,
GIM_Try, GIMT_Encode4(34520),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(34576),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
GIM_CheckComplexPattern, 0, 1, GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
GIM_CheckComplexPattern, 0, 2, GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XOR_B16_t16_e64),
GIR_RootToRootCopy, 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(0), 0,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 1,
GIR_ComplexSubOperandRenderer, 0, GIMT_Encode2(1), 0,
GIR_AddImm8, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34599),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_XOR_B16_fake16_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(36157),
GIM_RootCheckType, 1, GILLT_s32,
GIM_Try, GIMT_Encode4(34667),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NAND_B32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34725),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOR_B32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34783),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_XNOR_B32),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(34905),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35027),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35149),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 2,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35271),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 2, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35392),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 2, 1, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 0, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35513),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 2, 2, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 0, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 2,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35634),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 2, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 2, 1, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 0, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 2,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35755),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s32,
GIM_CheckIsSameOperandIgnoreCopies, 2, 2, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s16,
GIR_MakeTempReg, 2, GILLT_s16,
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_Copy, 3, 0, 1,
GIR_ConstrainSelectedInstOperands, 3,
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_Copy, 2, 2, 1,
GIR_ConstrainSelectedInstOperands, 2,
GIR_BuildMI, 1, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_Copy, 1, 1, 1,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddSimpleTempRegister, 0, 1,
GIR_AddSimpleTempRegister, 0, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35787),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOT_B32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35816),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_NOT_B32_e32),
GIR_RootToRootCopy, 0,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35884),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_RecordNamedOperand, 0, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24940),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XOR3_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35952),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordNamedOperand, 0, 1, 2,
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckHasOneUse, 1,
GIM_RecordNamedOperand, 1, 1, 0,
GIM_RecordNamedOperand, 1, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24940),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XOR3_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(35981),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_32RegClassID),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18648),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::S_XOR_B32),
GIR_AddImplicitDef, 0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Try, GIMT_Encode4(36031),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36083),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36133),
GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIM_RecordInsn, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s32,
GIM_CheckType, 1, 2, GILLT_s32,
GIM_CheckConstantInt8, 1, 2, uint8_t(-1),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_RootToRootCopy, 1,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36156),
GIM_RootCheckType, 2, GILLT_s32,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_MutateOpcode, 0, 0, GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
GIR_AddImplicitUse, 0, GIMT_Encode2(AMDGPU::EXEC),
GIR_RootConstrainSelectedInstOperands,
GIR_Done,
GIM_Reject,
GIM_Reject,
GIM_Try, GIMT_Encode4(38790),
GIM_RootCheckType, 1, GILLT_s64,
GIM_Try, GIMT_Encode4(36224),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NAND_B64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36282),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_OR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_NOR_B64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36340),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
GIM_RecordInsn, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_CheckHasOneUse, 1,
GIM_CheckConstantInt8, 0, 2, uint8_t(-1),
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
GIM_CheckIsSafeToFold, 1,
GIR_BuildRootMI, GIMT_Encode2(AMDGPU::S_XNOR_B64),
GIR_RootToRootCopy, 0,
GIR_Copy, 0, 1, 1,
GIR_Copy, 0, 1, 2,
GIR_SetImplicitDefDead, 0, 0,
GIR_RootConstrainSelectedInstOperands,
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36639),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(36938),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(37237),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 2,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(37536),
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 1,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 0, 2, 2, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(37834),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 2, 1, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 0, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 0, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(38132),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 1,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 2, 2, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 0, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 0, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(38430),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 2, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 2, 1, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 0, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 2, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 0, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 2, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 3, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 2, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 2, 1, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 2, 1, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 2, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 2, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 1, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 1, 0, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 1, 1,
GIR_AddSimpleTempRegister, 1, 2,
GIR_AddSimpleTempRegister, 1, 3,
GIR_ConstrainSelectedInstOperands, 1,
GIR_BuildRootMI, GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
GIR_RootToRootCopy, 0,
GIR_AddSimpleTempRegister, 0, 0,
GIR_AddImm8, 0, 3,
GIR_AddSimpleTempRegister, 0, 4,
GIR_AddImm8, 0, 11,
GIR_ConstrainOperandRC, 0, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_ConstrainOperandRC, 0, 1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 0, 3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_EraseRootFromParent_Done,
GIM_Try, GIMT_Encode4(38728),
GIM_RootCheckType, 2, GILLT_s64,
GIM_RootCheckRegBankForClass, 0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIM_RecordInsnIgnoreCopies, 1, 0, 2,
GIM_CheckOpcode, 1, GIMT_Encode2(TargetOpcode::G_AND),
GIM_CheckType, 1, 1, GILLT_s64,
GIM_CheckType, 1, 2, GILLT_s64,
GIM_RecordInsnIgnoreCopies, 2, 1, 2,
GIM_CheckOpcode, 2, GIMT_Encode2(TargetOpcode::G_XOR),
GIM_CheckType, 2, 1, GILLT_s64,
GIM_CheckIsSameOperandIgnoreCopies, 2, 2, 0, 1,
GIM_CheckCxxInsnPredicate, 0, GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
GIM_CheckIsSafeToFold, 2,
GIR_MakeTempReg, 0, GILLT_s16,
GIR_MakeTempReg, 1, GILLT_s32,
GIR_MakeTempReg, 2, GILLT_s32,
GIR_MakeTempReg, 3, GILLT_s32,
GIR_MakeTempReg, 4, GILLT_s16,
GIR_MakeTempReg, 5, GILLT_s32,
GIR_MakeTempReg, 6, GILLT_s32,
GIR_MakeTempReg, 7, GILLT_s32,
GIR_BuildMI, 8, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 8, 7, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 8, 0, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 8, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 8, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 7, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 7, 6, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 7, 2, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 7, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 7, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 6, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 6, 5, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 6, 1, 1, GIMT_Encode2(11),
GIR_ConstrainOperandRC, 6, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 6, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 5, GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
GIR_AddTempRegister, 5, 4, GIMT_Encode2(RegState::Define),
GIR_AddSimpleTempRegister, 5, 5,
GIR_AddSimpleTempRegister, 5, 6,
GIR_AddSimpleTempRegister, 5, 7,
GIR_ConstrainSelectedInstOperands, 5,
GIR_BuildMI, 4, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 4, 3, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 4, 0, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 4, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
GIR_ConstrainOperandRC, 4, 1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
GIR_BuildMI, 3, GIMT_Encode2(TargetOpcode::COPY),
GIR_AddTempRegister, 3, 2, GIMT_Encode2(RegState::Define),
GIR_CopySubReg, 3, 2, 1, GIMT_Encode2(3),
GIR_ConstrainOperandRC, 3, 0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID)#undef GIMT_Encode2#undef GIMT_Encode4#undef GIMT_Encode8#endif #ifdef GET_GLOBALISEL_PREDICATES_DECL#endif #ifdef GET_GLOBALISEL_PREDICATES_INIT#endif