llvm/lib/Target/AMDGPU/AMDGPUGenGlobalISel.inc

/*===- TableGen'erated file -------------------------------------*- C++ -*-===*\
|*                                                                            *|
|* Global Instruction Selector for the AMDGPU target                          *|
|*                                                                            *|
|* Automatically generated file, do not edit!                                 *|
|*                                                                            *|
\*===----------------------------------------------------------------------===*/

#ifdef GET_GLOBALISEL_PREDICATE_BITSET
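// PredicateBitset carries one bit per subtarget/mode predicate that can gate
// a selection rule; the bit indices are the SubtargetFeatureBits values
// defined under GET_GLOBALISEL_IMPL below.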
// NOTE: the emitted value was missing here; 115 is inferred from the highest
// feature bit used below (Feature_HasLdsAtomicAddF64Bit = 114).
const unsigned MAX_SUBTARGET_PREDICATES = 115;
using PredicateBitset = llvm::Bitset<MAX_SUBTARGET_PREDICATES>;
#endif // ifdef GET_GLOBALISEL_PREDICATE_BITSET

#ifdef GET_GLOBALISEL_TEMPORARIES_DECL
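  // These declarations are spliced into the AMDGPUInstructionSelector class
  // body; the selector header defines this macro before including the
  // generated .inc file.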
  mutable MatcherState State;
  typedef ComplexRendererFns(AMDGPUInstructionSelector::*ComplexMatcherMemFn)(MachineOperand &) const;
  typedef void(AMDGPUInstructionSelector::*CustomRendererFn)(MachineInstrBuilder &, const MachineInstr &, int) const;
  const ExecInfoTy<PredicateBitset, ComplexMatcherMemFn, CustomRendererFn> ExecInfo;
  static AMDGPUInstructionSelector::ComplexMatcherMemFn ComplexPredicateFns[];
  static AMDGPUInstructionSelector::CustomRendererFn CustomRenderers[];
  bool testImmPredicate_I64(unsigned PredicateID, int64_t Imm) const override;
  bool testImmPredicate_APInt(unsigned PredicateID, const APInt &Imm) const override;
  bool testImmPredicate_APFloat(unsigned PredicateID, const APFloat &Imm) const override;
  const uint8_t *getMatchTable() const override;
  bool testMIPredicate_MI(unsigned PredicateID, const MachineInstr &MI, const MatcherState &State) const override;
  bool testSimplePredicate(unsigned PredicateID) const override;
  bool runCustomAction(unsigned FnID, const MatcherState &State, NewMIVector &OutMIs) const override;
#endif // ifdef GET_GLOBALISEL_TEMPORARIES_DECL

#ifdef GET_GLOBALISEL_TEMPORARIES_INIT
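// This fragment is appended to the constructor's member initializer list
// (hence the leading comma). State(6) reserves matcher state for the largest
// number of operand renderers any rule here needs, and ExecInfo bundles the
// tables defined under GET_GLOBALISEL_IMPL.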
, State(6),
ExecInfo(TypeObjects, NumTypeObjects, FeatureBitsets, ComplexPredicateFns, CustomRenderers)
#endif // ifdef GET_GLOBALISEL_TEMPORARIES_INIT

#ifdef GET_GLOBALISEL_IMPL
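// The definitions below are compiled into AMDGPUInstructionSelector.cpp,
// which defines GET_GLOBALISEL_IMPL before including this file.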
// LLT Objects.
enum {
  GILLT_p0s64,
  GILLT_p1s64,
  GILLT_p2s32,
  GILLT_p3s32,
  GILLT_p4s64,
  GILLT_p5s32,
  GILLT_p6s32,
  GILLT_s1,
  GILLT_s16,
  GILLT_s32,
  GILLT_s64,
  GILLT_v2s16,
  GILLT_v2s32,
  GILLT_v2s64,
  GILLT_v3s32,
  GILLT_v3s64,
  GILLT_v4s16,
  GILLT_v4s32,
  GILLT_v4s64,
  GILLT_v5s32,
  GILLT_v6s32,
  GILLT_v7s32,
  GILLT_v8s16,
  GILLT_v8s32,
  GILLT_v8s64,
  GILLT_v9s32,
  GILLT_v10s32,
  GILLT_v11s32,
  GILLT_v12s32,
  GILLT_v16s16,
  GILLT_v16s32,
  GILLT_v16s64,
  GILLT_v32s16,
  GILLT_v32s32,
};
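// Each enumerator above is the index of the corresponding entry in
// TypeObjects below; the match table refers to LLTs by these indices, so the
// two lists must stay in the same order and NumTypeObjects must match their
// length.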
const static size_t NumTypeObjects = 34;
const static LLT TypeObjects[] = {
  LLT::pointer(0, 64),
  LLT::pointer(1, 64),
  LLT::pointer(2, 32),
  LLT::pointer(3, 32),
  LLT::pointer(4, 64),
  LLT::pointer(5, 32),
  LLT::pointer(6, 32),
  LLT::scalar(1),
  LLT::scalar(16),
  LLT::scalar(32),
  LLT::scalar(64),
  LLT::vector(ElementCount::getFixed(2), 16),
  LLT::vector(ElementCount::getFixed(2), 32),
  LLT::vector(ElementCount::getFixed(2), 64),
  LLT::vector(ElementCount::getFixed(3), 32),
  LLT::vector(ElementCount::getFixed(3), 64),
  LLT::vector(ElementCount::getFixed(4), 16),
  LLT::vector(ElementCount::getFixed(4), 32),
  LLT::vector(ElementCount::getFixed(4), 64),
  LLT::vector(ElementCount::getFixed(5), 32),
  LLT::vector(ElementCount::getFixed(6), 32),
  LLT::vector(ElementCount::getFixed(7), 32),
  LLT::vector(ElementCount::getFixed(8), 16),
  LLT::vector(ElementCount::getFixed(8), 32),
  LLT::vector(ElementCount::getFixed(8), 64),
  LLT::vector(ElementCount::getFixed(9), 32),
  LLT::vector(ElementCount::getFixed(10), 32),
  LLT::vector(ElementCount::getFixed(11), 32),
  LLT::vector(ElementCount::getFixed(12), 32),
  LLT::vector(ElementCount::getFixed(16), 16),
  LLT::vector(ElementCount::getFixed(16), 32),
  LLT::vector(ElementCount::getFixed(16), 64),
  LLT::vector(ElementCount::getFixed(32), 16),
  LLT::vector(ElementCount::getFixed(32), 32),
};

// Bits for subtarget features that participate in instruction matching.
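// The enumerator values are bit positions in PredicateBitset. They are
// assigned by TableGen and are not contiguous with declaration order; the
// largest bit used here is 114 (Feature_HasLdsAtomicAddF64Bit).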
enum SubtargetFeatureBits : uint8_t {
  Feature_FalsePredicateBit = 100,
  Feature_isGFX6Bit = 41,
  Feature_isGFX6GFX7Bit = 8,
  Feature_isGFX6GFX7GFX10Bit = 98,
  Feature_isGFX7OnlyBit = 71,
  Feature_isGFX7GFX8GFX9Bit = 36,
  Feature_isGFX6GFX7GFX8GFX9Bit = 56,
  Feature_isGFX6GFX7GFX8GFX9GFX10Bit = 111,
  Feature_isNotGFX12PlusBit = 19,
  Feature_isGFX8GFX9GFX10GFX11Bit = 20,
  Feature_isGFX7PlusBit = 13,
  Feature_isGFX8PlusBit = 4,
  Feature_isGFX8OnlyBit = 21,
  Feature_isGFX9PlusBit = 0,
  Feature_isNotGFX9PlusBit = 46,
  Feature_isGFX9OnlyBit = 49,
  Feature_isGFX90APlusBit = 32,
  Feature_isNotGFX90APlusBit = 22,
  Feature_isGFX908orGFX90ABit = 31,
  Feature_isGFX940PlusBit = 33,
  Feature_isGFX8GFX9Bit = 15,
  Feature_isGFX10OnlyBit = 52,
  Feature_isGFX10PlusBit = 5,
  Feature_isGFX9GFX10Bit = 47,
  Feature_isGFX11OnlyBit = 53,
  Feature_isGFX11PlusBit = 1,
  Feature_isGFX12OnlyBit = 54,
  Feature_isGFX12PlusBit = 3,
  Feature_HasFlatAddressSpaceBit = 73,
  Feature_HasFlatBufferGlobalAtomicFaddF64InstBit = 89,
  Feature_HasAtomicFMinFMaxF32GlobalInstsBit = 83,
  Feature_HasAtomicFMinFMaxF64GlobalInstsBit = 88,
  Feature_HasAtomicFMinFMaxF32FlatInstsBit = 79,
  Feature_HasAtomicFMinFMaxF64FlatInstsBit = 80,
  Feature_HasLdsAtomicAddF64Bit = 114,
  Feature_HasFlatGlobalInstsBit = 74,
  Feature_HasFlatScratchInstsBit = 77,
  Feature_HasD16LoadStoreBit = 82,
  Feature_HasFlatScratchSVSModeBit = 78,
  Feature_HasGFX10_BEncodingBit = 40,
  Feature_HasUnpackedD16VMemBit = 94,
  Feature_HasPackedD16VMemBit = 95,
  Feature_HasUnrestrictedSOffsetBit = 93,
  Feature_D16PreservesUnusedBitsBit = 75,
  Feature_LDSRequiresM0InitBit = 107,
  Feature_NotLDSRequiresM0InitBit = 108,
  Feature_HasExportInstsBit = 7,
  Feature_HasLDSFPAtomicAddF32Bit = 113,
  Feature_HasAddNoCarryInstsBit = 55,
  Feature_NotHasAddNoCarryInstsBit = 105,
  Feature_HasXNACKEnabledBit = 70,
  Feature_Has16BitInstsBit = 9,
  Feature_HasTrue16BitInstsBit = 14,
  Feature_NotHasTrue16BitInstsBit = 10,
  Feature_UseRealTrue16InstsBit = 11,
  Feature_UseFakeTrue16InstsBit = 12,
  Feature_HasVOP3PInstsBit = 64,
  Feature_HasMed3_16Bit = 106,
  Feature_HasMinMaxDenormModesBit = 99,
  Feature_NotHasMinMaxDenormModesBit = 101,
  Feature_HasPackedFP32OpsBit = 34,
  Feature_HasImageInstsBit = 6,
  Feature_HasIntClampBit = 61,
  Feature_HasMadMixInstsBit = 65,
  Feature_HasScalarStoresBit = 37,
  Feature_has16BankLDSBit = 42,
  Feature_has32BankLDSBit = 23,
  Feature_HasFmaMixInstsBit = 67,
  Feature_HasDLInstsBit = 16,
  Feature_HasFmacF64InstBit = 104,
  Feature_HasDot1InstsBit = 29,
  Feature_HasDot2InstsBit = 26,
  Feature_HasDot3InstsBit = 60,
  Feature_HasDot4InstsBit = 59,
  Feature_HasDot5InstsBit = 57,
  Feature_HasDot6InstsBit = 58,
  Feature_HasDot7InstsBit = 28,
  Feature_HasDot8InstsBit = 68,
  Feature_HasDot9InstsBit = 24,
  Feature_HasDot10InstsBit = 27,
  Feature_HasDot11InstsBit = 69,
  Feature_HasGetWaveIdInstBit = 39,
  Feature_HasMAIInstsBit = 30,
  Feature_HasSMemRealTimeBit = 38,
  Feature_HasSMemTimeInstBit = 35,
  Feature_HasShaderCyclesRegisterBit = 72,
  Feature_HasShaderCyclesHiLoRegistersBit = 45,
  Feature_HasFP8ConversionInstsBit = 51,
  Feature_HasMadMacF32InstsBit = 17,
  Feature_HasFmaLegacy32Bit = 18,
  Feature_HasAtomicDsPkAdd16InstsBit = 112,
  Feature_HasAtomicFlatPkAdd16InstsBit = 91,
  Feature_HasAtomicFaddRtnInstsBit = 86,
  Feature_HasAtomicFaddNoRtnInstsBit = 84,
  Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit = 85,
  Feature_HasAtomicBufferGlobalPkAddF16InstsBit = 87,
  Feature_HasAtomicGlobalPkAddBF16InstBit = 92,
  Feature_HasAtomicBufferPkAddBF16InstBit = 96,
  Feature_HasFlatAtomicFaddF32InstBit = 90,
  Feature_EnableFlatScratchBit = 76,
  Feature_DisableFlatScratchBit = 97,
  Feature_HasUnalignedAccessModeBit = 109,
  Feature_HasMADIntraFwdBugBit = 63,
  Feature_HasNotMADIntraFwdBugBit = 62,
  Feature_HasSALUFloatInstsBit = 2,
  Feature_HasPseudoScalarTransBit = 25,
  Feature_HasGDSBit = 110,
  Feature_HasCvtFP8VOP1BugBit = 48,
  Feature_HasNoCvtFP8VOP1BugBit = 50,
  Feature_HasAtomicCSubNoRtnInstsBit = 81,
  Feature_FP16DenormalsBit = 103,
  Feature_FP64DenormalsBit = 102,
  Feature_NoFP32DenormalsBit = 66,
  Feature_isWave32Bit = 43,
  Feature_isWave64Bit = 44,
};

PredicateBitset AMDGPUInstructionSelector::
computeAvailableModuleFeatures(const AMDGPUSubtarget *Subtarget) const {
  PredicateBitset Features{};
  if (false)
    Features.set(Feature_FalsePredicateBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS)
    Features.set(Feature_isGFX6Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS)
    Features.set(Feature_isGFX6GFX7Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
    Features.set(Feature_isGFX6GFX7GFX10Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS)
    Features.set(Feature_isGFX7OnlyBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
    Features.set(Feature_isGFX7GFX8GFX9Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
    Features.set(Feature_isGFX6GFX7GFX8GFX9Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::SOUTHERN_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::SEA_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
    Features.set(Feature_isGFX6GFX7GFX8GFX9GFX10Bit);
  if (Subtarget->getGeneration() <= AMDGPUSubtarget::GFX11)
    Features.set(Feature_isNotGFX12PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX11)
    Features.set(Feature_isGFX8GFX9GFX10GFX11Bit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::SEA_ISLANDS)
    Features.set(Feature_isGFX7PlusBit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::VOLCANIC_ISLANDS)
    Features.set(Feature_isGFX8PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS)
    Features.set(Feature_isGFX8OnlyBit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX9)
    Features.set(Feature_isGFX9PlusBit);
  if (Subtarget->getGeneration() < AMDGPUSubtarget::GFX9)
    Features.set(Feature_isNotGFX9PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
    Features.set(Feature_isGFX9OnlyBit);
  if (Subtarget->hasGFX90AInsts())
    Features.set(Feature_isGFX90APlusBit);
  if (!Subtarget->hasGFX90AInsts())
    Features.set(Feature_isNotGFX90APlusBit);
  if (Subtarget->hasMAIInsts() && !Subtarget->hasGFX940Insts())
    Features.set(Feature_isGFX908orGFX90ABit);
  if (Subtarget->hasGFX940Insts())
    Features.set(Feature_isGFX940PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::VOLCANIC_ISLANDS ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX9)
    Features.set(Feature_isGFX8GFX9Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
    Features.set(Feature_isGFX10OnlyBit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX10)
    Features.set(Feature_isGFX10PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX9 ||Subtarget->getGeneration() == AMDGPUSubtarget::GFX10)
    Features.set(Feature_isGFX9GFX10Bit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX11)
    Features.set(Feature_isGFX11OnlyBit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX11)
    Features.set(Feature_isGFX11PlusBit);
  if (Subtarget->getGeneration() == AMDGPUSubtarget::GFX12)
    Features.set(Feature_isGFX12OnlyBit);
  if (Subtarget->getGeneration() >= AMDGPUSubtarget::GFX12)
    Features.set(Feature_isGFX12PlusBit);
  if (Subtarget->hasFlatAddressSpace())
    Features.set(Feature_HasFlatAddressSpaceBit);
  if (Subtarget->hasFlatBufferGlobalAtomicFaddF64Inst())
    Features.set(Feature_HasFlatBufferGlobalAtomicFaddF64InstBit);
  if (Subtarget->hasAtomicFMinFMaxF32GlobalInsts())
    Features.set(Feature_HasAtomicFMinFMaxF32GlobalInstsBit);
  if (Subtarget->hasAtomicFMinFMaxF64GlobalInsts())
    Features.set(Feature_HasAtomicFMinFMaxF64GlobalInstsBit);
  if (Subtarget->hasAtomicFMinFMaxF32FlatInsts())
    Features.set(Feature_HasAtomicFMinFMaxF32FlatInstsBit);
  if (Subtarget->hasAtomicFMinFMaxF64FlatInsts())
    Features.set(Feature_HasAtomicFMinFMaxF64FlatInstsBit);
  if (Subtarget->hasLdsAtomicAddF64())
    Features.set(Feature_HasLdsAtomicAddF64Bit);
  if (Subtarget->hasFlatGlobalInsts())
    Features.set(Feature_HasFlatGlobalInstsBit);
  if (Subtarget->hasFlatScratchInsts())
    Features.set(Feature_HasFlatScratchInstsBit);
  if (Subtarget->hasD16LoadStore())
    Features.set(Feature_HasD16LoadStoreBit);
  if (Subtarget->hasFlatScratchSVSMode())
    Features.set(Feature_HasFlatScratchSVSModeBit);
  if (Subtarget->hasGFX10_BEncoding())
    Features.set(Feature_HasGFX10_BEncodingBit);
  if (Subtarget->hasUnpackedD16VMem())
    Features.set(Feature_HasUnpackedD16VMemBit);
  if (!Subtarget->hasUnpackedD16VMem())
    Features.set(Feature_HasPackedD16VMemBit);
  if (!Subtarget->hasRestrictedSOffset())
    Features.set(Feature_HasUnrestrictedSOffsetBit);
  if (Subtarget->d16PreservesUnusedBits())
    Features.set(Feature_D16PreservesUnusedBitsBit);
  if (Subtarget->ldsRequiresM0Init())
    Features.set(Feature_LDSRequiresM0InitBit);
  if (!Subtarget->ldsRequiresM0Init())
    Features.set(Feature_NotLDSRequiresM0InitBit);
  if (Subtarget->hasExportInsts())
    Features.set(Feature_HasExportInstsBit);
  if (Subtarget->hasLDSFPAtomicAddF32())
    Features.set(Feature_HasLDSFPAtomicAddF32Bit);
  if (Subtarget->hasAddNoCarry())
    Features.set(Feature_HasAddNoCarryInstsBit);
  if (!Subtarget->hasAddNoCarry())
    Features.set(Feature_NotHasAddNoCarryInstsBit);
  if (Subtarget->isXNACKEnabled())
    Features.set(Feature_HasXNACKEnabledBit);
  if (Subtarget->has16BitInsts())
    Features.set(Feature_Has16BitInstsBit);
  if (Subtarget->hasTrue16BitInsts())
    Features.set(Feature_HasTrue16BitInstsBit);
  if (!Subtarget->hasTrue16BitInsts())
    Features.set(Feature_NotHasTrue16BitInstsBit);
  if (Subtarget->useRealTrue16Insts())
    Features.set(Feature_UseRealTrue16InstsBit);
  if (Subtarget->hasTrue16BitInsts() && !Subtarget->useRealTrue16Insts())
    Features.set(Feature_UseFakeTrue16InstsBit);
  if (Subtarget->hasVOP3PInsts())
    Features.set(Feature_HasVOP3PInstsBit);
  if (Subtarget->hasMed3_16())
    Features.set(Feature_HasMed3_16Bit);
  if (Subtarget->supportsMinMaxDenormModes())
    Features.set(Feature_HasMinMaxDenormModesBit);
  if (!Subtarget->supportsMinMaxDenormModes())
    Features.set(Feature_NotHasMinMaxDenormModesBit);
  if (Subtarget->hasPackedFP32Ops())
    Features.set(Feature_HasPackedFP32OpsBit);
  if (Subtarget->hasImageInsts())
    Features.set(Feature_HasImageInstsBit);
  if (Subtarget->hasIntClamp())
    Features.set(Feature_HasIntClampBit);
  if (Subtarget->hasMadMixInsts())
    Features.set(Feature_HasMadMixInstsBit);
  if (Subtarget->hasScalarStores())
    Features.set(Feature_HasScalarStoresBit);
  if (Subtarget->getLDSBankCount() == 16)
    Features.set(Feature_has16BankLDSBit);
  if (Subtarget->getLDSBankCount() == 32)
    Features.set(Feature_has32BankLDSBit);
  if (Subtarget->hasFmaMixInsts())
    Features.set(Feature_HasFmaMixInstsBit);
  if (Subtarget->hasDLInsts())
    Features.set(Feature_HasDLInstsBit);
  if (Subtarget->hasFmacF64Inst())
    Features.set(Feature_HasFmacF64InstBit);
  if (Subtarget->hasDot1Insts())
    Features.set(Feature_HasDot1InstsBit);
  if (Subtarget->hasDot2Insts())
    Features.set(Feature_HasDot2InstsBit);
  if (Subtarget->hasDot3Insts())
    Features.set(Feature_HasDot3InstsBit);
  if (Subtarget->hasDot4Insts())
    Features.set(Feature_HasDot4InstsBit);
  if (Subtarget->hasDot5Insts())
    Features.set(Feature_HasDot5InstsBit);
  if (Subtarget->hasDot6Insts())
    Features.set(Feature_HasDot6InstsBit);
  if (Subtarget->hasDot7Insts())
    Features.set(Feature_HasDot7InstsBit);
  if (Subtarget->hasDot8Insts())
    Features.set(Feature_HasDot8InstsBit);
  if (Subtarget->hasDot9Insts())
    Features.set(Feature_HasDot9InstsBit);
  if (Subtarget->hasDot10Insts())
    Features.set(Feature_HasDot10InstsBit);
  if (Subtarget->hasDot11Insts())
    Features.set(Feature_HasDot11InstsBit);
  if (Subtarget->hasGetWaveIdInst())
    Features.set(Feature_HasGetWaveIdInstBit);
  if (Subtarget->hasMAIInsts())
    Features.set(Feature_HasMAIInstsBit);
  if (Subtarget->hasSMemRealTime())
    Features.set(Feature_HasSMemRealTimeBit);
  if (Subtarget->hasSMemTimeInst())
    Features.set(Feature_HasSMemTimeInstBit);
  if (Subtarget->hasShaderCyclesRegister())
    Features.set(Feature_HasShaderCyclesRegisterBit);
  if (Subtarget->hasShaderCyclesHiLoRegisters())
    Features.set(Feature_HasShaderCyclesHiLoRegistersBit);
  if (Subtarget->hasFP8ConversionInsts())
    Features.set(Feature_HasFP8ConversionInstsBit);
  if (Subtarget->hasMadMacF32Insts())
    Features.set(Feature_HasMadMacF32InstsBit);
  if (Subtarget->hasGFX10_3Insts())
    Features.set(Feature_HasFmaLegacy32Bit);
  if (Subtarget->hasAtomicDsPkAdd16Insts())
    Features.set(Feature_HasAtomicDsPkAdd16InstsBit);
  if (Subtarget->hasAtomicFlatPkAdd16Insts())
    Features.set(Feature_HasAtomicFlatPkAdd16InstsBit);
  if (Subtarget->hasAtomicFaddRtnInsts())
    Features.set(Feature_HasAtomicFaddRtnInstsBit);
  if (Subtarget->hasAtomicFaddNoRtnInsts())
    Features.set(Feature_HasAtomicFaddNoRtnInstsBit);
  if (Subtarget->hasAtomicBufferGlobalPkAddF16NoRtnInsts() || Subtarget->hasAtomicBufferGlobalPkAddF16Insts())
    Features.set(Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit);
  if (Subtarget->hasAtomicBufferGlobalPkAddF16Insts())
    Features.set(Feature_HasAtomicBufferGlobalPkAddF16InstsBit);
  if (Subtarget->hasAtomicGlobalPkAddBF16Inst())
    Features.set(Feature_HasAtomicGlobalPkAddBF16InstBit);
  if (Subtarget->hasAtomicBufferPkAddBF16Inst())
    Features.set(Feature_HasAtomicBufferPkAddBF16InstBit);
  if (Subtarget->hasFlatAtomicFaddF32Inst())
    Features.set(Feature_HasFlatAtomicFaddF32InstBit);
  if (Subtarget->enableFlatScratch())
    Features.set(Feature_EnableFlatScratchBit);
  if (!Subtarget->enableFlatScratch())
    Features.set(Feature_DisableFlatScratchBit);
  if (Subtarget->hasUnalignedAccessMode())
    Features.set(Feature_HasUnalignedAccessModeBit);
  if (Subtarget->hasMADIntraFwdBug())
    Features.set(Feature_HasMADIntraFwdBugBit);
  if (!Subtarget->hasMADIntraFwdBug())
    Features.set(Feature_HasNotMADIntraFwdBugBit);
  if (Subtarget->hasSALUFloatInsts())
    Features.set(Feature_HasSALUFloatInstsBit);
  if (Subtarget->hasPseudoScalarTrans())
    Features.set(Feature_HasPseudoScalarTransBit);
  if (Subtarget->hasGDS())
    Features.set(Feature_HasGDSBit);
  if (Subtarget->hasCvtFP8VOP1Bug())
    Features.set(Feature_HasCvtFP8VOP1BugBit);
  if (!Subtarget->hasCvtFP8VOP1Bug())
    Features.set(Feature_HasNoCvtFP8VOP1BugBit);
  if (Subtarget->hasAtomicCSubNoRtnInsts())
    Features.set(Feature_HasAtomicCSubNoRtnInstsBit);
  if (Subtarget->getWavefrontSize() == 32)
    Features.set(Feature_isWave32Bit);
  if (Subtarget->getWavefrontSize() == 64)
    Features.set(Feature_isWave64Bit);
  return Features;
}
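// The module-level features above depend only on the subtarget and are
// computed once. The per-function features below also consult the
// MachineFunction (its floating-point denormal mode) and are recomputed by
// setupGeneratedPerFunctionState for every selected function.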

void AMDGPUInstructionSelector::setupGeneratedPerFunctionState(MachineFunction &MF) {
  AvailableFunctionFeatures = computeAvailableFunctionFeatures((const AMDGPUSubtarget *)&MF.getSubtarget(), &MF);
}
PredicateBitset AMDGPUInstructionSelector::
computeAvailableFunctionFeatures(const AMDGPUSubtarget *Subtarget, const MachineFunction *MF) const {
  PredicateBitset Features{};
  if (MF->getInfo<SIMachineFunctionInfo>()->getMode().FP64FP16Denormals != DenormalMode::getPreserveSign())
    Features.set(Feature_FP16DenormalsBit);
  if (MF->getInfo<SIMachineFunctionInfo>()->getMode().FP64FP16Denormals != DenormalMode::getPreserveSign())
    Features.set(Feature_FP64DenormalsBit);
  if (MF->getInfo<SIMachineFunctionInfo>()->getMode().FP32Denormals == DenormalMode::getPreserveSign())
    Features.set(Feature_NoFP32DenormalsBit);
  return Features;
}
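// Note that the FP16 and FP64 denormal predicates both test the shared
// FP64FP16Denormals mode field: a single MODE register field controls f64 and
// f16 denormal handling on AMDGPU, so the two checks are intentionally
// identical.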

// Feature bitsets.
enum {
  GIFBS_Invalid,
  GIFBS_DisableFlatScratch,
  GIFBS_Has16BitInsts,
  GIFBS_HasAddNoCarryInsts,
  GIFBS_HasAtomicBufferGlobalPkAddF16Insts,
  GIFBS_HasAtomicBufferGlobalPkAddF16NoRtnInsts,
  GIFBS_HasAtomicBufferPkAddBF16Inst,
  GIFBS_HasAtomicCSubNoRtnInsts,
  GIFBS_HasAtomicFMinFMaxF32GlobalInsts,
  GIFBS_HasAtomicFMinFMaxF64GlobalInsts,
  GIFBS_HasAtomicFaddNoRtnInsts,
  GIFBS_HasAtomicFaddRtnInsts,
  GIFBS_HasAtomicFlatPkAdd16Insts,
  GIFBS_HasAtomicGlobalPkAddBF16Inst,
  GIFBS_HasD16LoadStore,
  GIFBS_HasDLInsts,
  GIFBS_HasDot10Insts,
  GIFBS_HasDot1Insts,
  GIFBS_HasDot2Insts,
  GIFBS_HasDot3Insts,
  GIFBS_HasDot4Insts,
  GIFBS_HasDot5Insts,
  GIFBS_HasDot6Insts,
  GIFBS_HasDot7Insts,
  GIFBS_HasDot8Insts,
  GIFBS_HasDot9Insts,
  GIFBS_HasFlatAddressSpace,
  GIFBS_HasFlatAtomicFaddF32Inst,
  GIFBS_HasFlatBufferGlobalAtomicFaddF64Inst,
  GIFBS_HasFlatGlobalInsts,
  GIFBS_HasFmaLegacy32,
  GIFBS_HasFmaMixInsts,
  GIFBS_HasFmacF64Inst,
  GIFBS_HasGDS,
  GIFBS_HasGFX10_BEncoding,
  GIFBS_HasGetWaveIdInst,
  GIFBS_HasIntClamp,
  GIFBS_HasLdsAtomicAddF64,
  GIFBS_HasMAIInsts,
  GIFBS_HasMadMacF32Insts,
  GIFBS_HasMed3_16,
  GIFBS_HasMinMaxDenormModes,
  GIFBS_HasPackedD16VMem,
  GIFBS_HasPackedFP32Ops,
  GIFBS_HasPseudoScalarTrans,
  GIFBS_HasSALUFloatInsts,
  GIFBS_HasSMemRealTime,
  GIFBS_HasSMemTimeInst,
  GIFBS_HasShaderCyclesHiLoRegisters,
  GIFBS_HasShaderCyclesRegister,
  GIFBS_HasTrue16BitInsts,
  GIFBS_HasUnpackedD16VMem,
  GIFBS_HasUnrestrictedSOffset,
  GIFBS_HasVOP3PInsts,
  GIFBS_HasXNACKEnabled,
  GIFBS_LDSRequiresM0Init,
  GIFBS_NotHasAddNoCarryInsts,
  GIFBS_NotHasTrue16BitInsts,
  GIFBS_NotLDSRequiresM0Init,
  GIFBS_UseFakeTrue16Insts,
  GIFBS_UseRealTrue16Insts,
  GIFBS_isGFX10Only,
  GIFBS_isGFX10Plus,
  GIFBS_isGFX11Only,
  GIFBS_isGFX11Plus,
  GIFBS_isGFX12Only,
  GIFBS_isGFX12Plus,
  GIFBS_isGFX6,
  GIFBS_isGFX6GFX7,
  GIFBS_isGFX6GFX7GFX8GFX9,
  GIFBS_isGFX7GFX8GFX9,
  GIFBS_isGFX7Only,
  GIFBS_isGFX7Plus,
  GIFBS_isGFX8GFX9,
  GIFBS_isGFX8GFX9GFX10GFX11,
  GIFBS_isGFX8Plus,
  GIFBS_isGFX908orGFX90A,
  GIFBS_isGFX90APlus,
  GIFBS_isGFX940Plus,
  GIFBS_isGFX9GFX10,
  GIFBS_isGFX9Only,
  GIFBS_isGFX9Plus,
  GIFBS_isNotGFX12Plus,
  GIFBS_isNotGFX90APlus,
  GIFBS_isNotGFX9Plus,
  GIFBS_isWave32,
  GIFBS_isWave64,
  GIFBS_DisableFlatScratch_HasD16LoadStore,
  GIFBS_DisableFlatScratch_HasUnrestrictedSOffset,
  GIFBS_EnableFlatScratch_HasFlatScratchInsts,
  GIFBS_FP16Denormals_NotHasMinMaxDenormModes,
  GIFBS_FalsePredicate_NotHasMinMaxDenormModes,
  GIFBS_Has16BitInsts_HasIntClamp,
  GIFBS_Has16BitInsts_NotHasTrue16BitInsts,
  GIFBS_Has16BitInsts_UseFakeTrue16Insts,
  GIFBS_Has16BitInsts_UseRealTrue16Insts,
  GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9,
  GIFBS_Has16BitInsts_isGFX8GFX9,
  GIFBS_Has16BitInsts_isGFX8Only,
  GIFBS_Has16BitInsts_isNotGFX90APlus,
  GIFBS_HasAddNoCarryInsts_HasIntClamp,
  GIFBS_HasAtomicBufferGlobalPkAddF16Insts_HasUnrestrictedSOffset,
  GIFBS_HasAtomicBufferGlobalPkAddF16NoRtnInsts_HasUnrestrictedSOffset,
  GIFBS_HasAtomicBufferPkAddBF16Inst_HasUnrestrictedSOffset,
  GIFBS_HasAtomicCSubNoRtnInsts_isGFX12Plus,
  GIFBS_HasAtomicDsPkAdd16Insts_HasGDS,
  GIFBS_HasAtomicDsPkAdd16Insts_LDSRequiresM0Init,
  GIFBS_HasAtomicDsPkAdd16Insts_NotLDSRequiresM0Init,
  GIFBS_HasAtomicFMinFMaxF32FlatInsts_HasFlatAddressSpace,
  GIFBS_HasAtomicFMinFMaxF32FlatInsts_HasFlatGlobalInsts,
  GIFBS_HasAtomicFMinFMaxF32GlobalInsts_HasFlatGlobalInsts,
  GIFBS_HasAtomicFMinFMaxF32GlobalInsts_HasUnrestrictedSOffset,
  GIFBS_HasAtomicFMinFMaxF64FlatInsts_HasFlatAddressSpace,
  GIFBS_HasAtomicFMinFMaxF64FlatInsts_HasFlatGlobalInsts,
  GIFBS_HasAtomicFMinFMaxF64GlobalInsts_HasFlatGlobalInsts,
  GIFBS_HasAtomicFMinFMaxF64GlobalInsts_HasUnrestrictedSOffset,
  GIFBS_HasAtomicFaddNoRtnInsts_HasUnrestrictedSOffset,
  GIFBS_HasAtomicFaddRtnInsts_HasUnrestrictedSOffset,
  GIFBS_HasCvtFP8VOP1Bug_isGFX9Only,
  GIFBS_HasDot11Insts_isGFX12Plus,
  GIFBS_HasExportInsts_isGFX12Plus,
  GIFBS_HasFP8ConversionInsts_isGFX12Plus,
  GIFBS_HasFP8ConversionInsts_isGFX940Plus,
  GIFBS_HasFP8ConversionInsts_isGFX9Plus,
  GIFBS_HasFlatAddressSpace_isGFX12Plus,
  GIFBS_HasFlatBufferGlobalAtomicFaddF64Inst_HasUnrestrictedSOffset,
  GIFBS_HasFlatGlobalInsts_isGFX12Plus,
  GIFBS_HasGDS_HasLDSFPAtomicAddF32,
  GIFBS_HasGDS_isGFX11Plus,
  GIFBS_HasGDS_isGFX6GFX7GFX8GFX9GFX10,
  GIFBS_HasImageInsts_isGFX12Plus,
  GIFBS_HasLDSFPAtomicAddF32_LDSRequiresM0Init,
  GIFBS_HasLDSFPAtomicAddF32_NotLDSRequiresM0Init,
  GIFBS_HasMADIntraFwdBug_isGFX11Only,
  GIFBS_HasMadMacF32Insts_NoFP32Denormals,
  GIFBS_HasMadMixInsts_NoFP32Denormals,
  GIFBS_HasMinMaxDenormModes_isGFX12Plus,
  GIFBS_HasMinMaxDenormModes_isNotGFX12Plus,
  GIFBS_HasNoCvtFP8VOP1Bug_isGFX9Only,
  GIFBS_HasNotMADIntraFwdBug_isGFX9Plus,
  GIFBS_HasPackedD16VMem_HasUnrestrictedSOffset,
  GIFBS_HasScalarStores_isGFX8Plus,
  GIFBS_HasTrue16BitInsts_isGFX10Plus,
  GIFBS_HasUnalignedAccessMode_LDSRequiresM0Init,
  GIFBS_HasUnalignedAccessMode_NotLDSRequiresM0Init,
  GIFBS_HasXNACKEnabled_isGFX7Only,
  GIFBS_HasXNACKEnabled_isGFX9Plus,
  GIFBS_HasXNACKEnabled_isNotGFX9Plus,
  GIFBS_LDSRequiresM0Init_isGFX6GFX7GFX8GFX9GFX10,
  GIFBS_LDSRequiresM0Init_isGFX7Plus,
  GIFBS_NotHasTrue16BitInsts_isGFX10Plus,
  GIFBS_NotHasTrue16BitInsts_isGFX8Plus,
  GIFBS_NotLDSRequiresM0Init_isGFX6GFX7GFX8GFX9GFX10,
  GIFBS_NotLDSRequiresM0Init_isGFX7Plus,
  GIFBS_UseFakeTrue16Insts_isGFX8Plus,
  GIFBS_has16BankLDS_isNotGFX90APlus,
  GIFBS_has32BankLDS_isNotGFX90APlus,
  GIFBS_isGFX11Only_isWave32,
  GIFBS_isGFX11Only_isWave64,
  GIFBS_isGFX12Plus_isWave32,
  GIFBS_isGFX12Plus_isWave64,
  GIFBS_DisableFlatScratch_HasD16LoadStore_HasUnrestrictedSOffset,
  GIFBS_EnableFlatScratch_HasD16LoadStore_HasFlatScratchInsts,
  GIFBS_EnableFlatScratch_HasFlatScratchInsts_HasFlatScratchSVSMode,
  GIFBS_FP64Denormals_NotHasMinMaxDenormModes_isGFX12Plus,
  GIFBS_FP64Denormals_NotHasMinMaxDenormModes_isNotGFX12Plus,
  GIFBS_Has16BitInsts_HasMinMaxDenormModes_HasTrue16BitInsts,
  GIFBS_Has16BitInsts_HasMinMaxDenormModes_NotHasTrue16BitInsts,
  GIFBS_Has16BitInsts_has32BankLDS_isNotGFX90APlus,
  GIFBS_HasMadMacF32Insts_NoFP32Denormals_isGFX6GFX7GFX10,
  GIFBS_EnableFlatScratch_HasD16LoadStore_HasFlatScratchInsts_HasFlatScratchSVSMode,
  GIFBS_FP16Denormals_Has16BitInsts_HasTrue16BitInsts_NotHasMinMaxDenormModes,
  GIFBS_FP16Denormals_Has16BitInsts_NotHasMinMaxDenormModes_NotHasTrue16BitInsts,
};
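// Each GIFBS_* value indexes the FeatureBitsets array below; when the match
// table checks a rule's requirements it roughly verifies that
//   (AvailableFeatures & FeatureBitsets[GIFBS_x]) == FeatureBitsets[GIFBS_x]
// i.e. every listed feature bit was set by the compute*Features functions
// above.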
constexpr static PredicateBitset FeatureBitsets[] {
  {}, // GIFBS_Invalid
  {Feature_DisableFlatScratchBit, },
  {Feature_Has16BitInstsBit, },
  {Feature_HasAddNoCarryInstsBit, },
  {Feature_HasAtomicBufferGlobalPkAddF16InstsBit, },
  {Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit, },
  {Feature_HasAtomicBufferPkAddBF16InstBit, },
  {Feature_HasAtomicCSubNoRtnInstsBit, },
  {Feature_HasAtomicFMinFMaxF32GlobalInstsBit, },
  {Feature_HasAtomicFMinFMaxF64GlobalInstsBit, },
  {Feature_HasAtomicFaddNoRtnInstsBit, },
  {Feature_HasAtomicFaddRtnInstsBit, },
  {Feature_HasAtomicFlatPkAdd16InstsBit, },
  {Feature_HasAtomicGlobalPkAddBF16InstBit, },
  {Feature_HasD16LoadStoreBit, },
  {Feature_HasDLInstsBit, },
  {Feature_HasDot10InstsBit, },
  {Feature_HasDot1InstsBit, },
  {Feature_HasDot2InstsBit, },
  {Feature_HasDot3InstsBit, },
  {Feature_HasDot4InstsBit, },
  {Feature_HasDot5InstsBit, },
  {Feature_HasDot6InstsBit, },
  {Feature_HasDot7InstsBit, },
  {Feature_HasDot8InstsBit, },
  {Feature_HasDot9InstsBit, },
  {Feature_HasFlatAddressSpaceBit, },
  {Feature_HasFlatAtomicFaddF32InstBit, },
  {Feature_HasFlatBufferGlobalAtomicFaddF64InstBit, },
  {Feature_HasFlatGlobalInstsBit, },
  {Feature_HasFmaLegacy32Bit, },
  {Feature_HasFmaMixInstsBit, },
  {Feature_HasFmacF64InstBit, },
  {Feature_HasGDSBit, },
  {Feature_HasGFX10_BEncodingBit, },
  {Feature_HasGetWaveIdInstBit, },
  {Feature_HasIntClampBit, },
  {Feature_HasLdsAtomicAddF64Bit, },
  {Feature_HasMAIInstsBit, },
  {Feature_HasMadMacF32InstsBit, },
  {Feature_HasMed3_16Bit, },
  {Feature_HasMinMaxDenormModesBit, },
  {Feature_HasPackedD16VMemBit, },
  {Feature_HasPackedFP32OpsBit, },
  {Feature_HasPseudoScalarTransBit, },
  {Feature_HasSALUFloatInstsBit, },
  {Feature_HasSMemRealTimeBit, },
  {Feature_HasSMemTimeInstBit, },
  {Feature_HasShaderCyclesHiLoRegistersBit, },
  {Feature_HasShaderCyclesRegisterBit, },
  {Feature_HasTrue16BitInstsBit, },
  {Feature_HasUnpackedD16VMemBit, },
  {Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasVOP3PInstsBit, },
  {Feature_HasXNACKEnabledBit, },
  {Feature_LDSRequiresM0InitBit, },
  {Feature_NotHasAddNoCarryInstsBit, },
  {Feature_NotHasTrue16BitInstsBit, },
  {Feature_NotLDSRequiresM0InitBit, },
  {Feature_UseFakeTrue16InstsBit, },
  {Feature_UseRealTrue16InstsBit, },
  {Feature_isGFX10OnlyBit, },
  {Feature_isGFX10PlusBit, },
  {Feature_isGFX11OnlyBit, },
  {Feature_isGFX11PlusBit, },
  {Feature_isGFX12OnlyBit, },
  {Feature_isGFX12PlusBit, },
  {Feature_isGFX6Bit, },
  {Feature_isGFX6GFX7Bit, },
  {Feature_isGFX6GFX7GFX8GFX9Bit, },
  {Feature_isGFX7GFX8GFX9Bit, },
  {Feature_isGFX7OnlyBit, },
  {Feature_isGFX7PlusBit, },
  {Feature_isGFX8GFX9Bit, },
  {Feature_isGFX8GFX9GFX10GFX11Bit, },
  {Feature_isGFX8PlusBit, },
  {Feature_isGFX908orGFX90ABit, },
  {Feature_isGFX90APlusBit, },
  {Feature_isGFX940PlusBit, },
  {Feature_isGFX9GFX10Bit, },
  {Feature_isGFX9OnlyBit, },
  {Feature_isGFX9PlusBit, },
  {Feature_isNotGFX12PlusBit, },
  {Feature_isNotGFX90APlusBit, },
  {Feature_isNotGFX9PlusBit, },
  {Feature_isWave32Bit, },
  {Feature_isWave64Bit, },
  {Feature_DisableFlatScratchBit, Feature_HasD16LoadStoreBit, },
  {Feature_DisableFlatScratchBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_EnableFlatScratchBit, Feature_HasFlatScratchInstsBit, },
  {Feature_FP16DenormalsBit, Feature_NotHasMinMaxDenormModesBit, },
  {Feature_FalsePredicateBit, Feature_NotHasMinMaxDenormModesBit, },
  {Feature_Has16BitInstsBit, Feature_HasIntClampBit, },
  {Feature_Has16BitInstsBit, Feature_NotHasTrue16BitInstsBit, },
  {Feature_Has16BitInstsBit, Feature_UseFakeTrue16InstsBit, },
  {Feature_Has16BitInstsBit, Feature_UseRealTrue16InstsBit, },
  {Feature_Has16BitInstsBit, Feature_isGFX6GFX7GFX8GFX9Bit, },
  {Feature_Has16BitInstsBit, Feature_isGFX8GFX9Bit, },
  {Feature_Has16BitInstsBit, Feature_isGFX8OnlyBit, },
  {Feature_Has16BitInstsBit, Feature_isNotGFX90APlusBit, },
  {Feature_HasAddNoCarryInstsBit, Feature_HasIntClampBit, },
  {Feature_HasAtomicBufferGlobalPkAddF16InstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicBufferGlobalPkAddF16NoRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicBufferPkAddBF16InstBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicCSubNoRtnInstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasAtomicDsPkAdd16InstsBit, Feature_HasGDSBit, },
  {Feature_HasAtomicDsPkAdd16InstsBit, Feature_LDSRequiresM0InitBit, },
  {Feature_HasAtomicDsPkAdd16InstsBit, Feature_NotLDSRequiresM0InitBit, },
  {Feature_HasAtomicFMinFMaxF32FlatInstsBit, Feature_HasFlatAddressSpaceBit, },
  {Feature_HasAtomicFMinFMaxF32FlatInstsBit, Feature_HasFlatGlobalInstsBit, },
  {Feature_HasAtomicFMinFMaxF32GlobalInstsBit, Feature_HasFlatGlobalInstsBit, },
  {Feature_HasAtomicFMinFMaxF32GlobalInstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicFMinFMaxF64FlatInstsBit, Feature_HasFlatAddressSpaceBit, },
  {Feature_HasAtomicFMinFMaxF64FlatInstsBit, Feature_HasFlatGlobalInstsBit, },
  {Feature_HasAtomicFMinFMaxF64GlobalInstsBit, Feature_HasFlatGlobalInstsBit, },
  {Feature_HasAtomicFMinFMaxF64GlobalInstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicFaddNoRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasAtomicFaddRtnInstsBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasCvtFP8VOP1BugBit, Feature_isGFX9OnlyBit, },
  {Feature_HasDot11InstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasExportInstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasFP8ConversionInstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasFP8ConversionInstsBit, Feature_isGFX940PlusBit, },
  {Feature_HasFP8ConversionInstsBit, Feature_isGFX9PlusBit, },
  {Feature_HasFlatAddressSpaceBit, Feature_isGFX12PlusBit, },
  {Feature_HasFlatBufferGlobalAtomicFaddF64InstBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasFlatGlobalInstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasGDSBit, Feature_HasLDSFPAtomicAddF32Bit, },
  {Feature_HasGDSBit, Feature_isGFX11PlusBit, },
  {Feature_HasGDSBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
  {Feature_HasImageInstsBit, Feature_isGFX12PlusBit, },
  {Feature_HasLDSFPAtomicAddF32Bit, Feature_LDSRequiresM0InitBit, },
  {Feature_HasLDSFPAtomicAddF32Bit, Feature_NotLDSRequiresM0InitBit, },
  {Feature_HasMADIntraFwdBugBit, Feature_isGFX11OnlyBit, },
  {Feature_HasMadMacF32InstsBit, Feature_NoFP32DenormalsBit, },
  {Feature_HasMadMixInstsBit, Feature_NoFP32DenormalsBit, },
  {Feature_HasMinMaxDenormModesBit, Feature_isGFX12PlusBit, },
  {Feature_HasMinMaxDenormModesBit, Feature_isNotGFX12PlusBit, },
  {Feature_HasNoCvtFP8VOP1BugBit, Feature_isGFX9OnlyBit, },
  {Feature_HasNotMADIntraFwdBugBit, Feature_isGFX9PlusBit, },
  {Feature_HasPackedD16VMemBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_HasScalarStoresBit, Feature_isGFX8PlusBit, },
  {Feature_HasTrue16BitInstsBit, Feature_isGFX10PlusBit, },
  {Feature_HasUnalignedAccessModeBit, Feature_LDSRequiresM0InitBit, },
  {Feature_HasUnalignedAccessModeBit, Feature_NotLDSRequiresM0InitBit, },
  {Feature_HasXNACKEnabledBit, Feature_isGFX7OnlyBit, },
  {Feature_HasXNACKEnabledBit, Feature_isGFX9PlusBit, },
  {Feature_HasXNACKEnabledBit, Feature_isNotGFX9PlusBit, },
  {Feature_LDSRequiresM0InitBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
  {Feature_LDSRequiresM0InitBit, Feature_isGFX7PlusBit, },
  {Feature_NotHasTrue16BitInstsBit, Feature_isGFX10PlusBit, },
  {Feature_NotHasTrue16BitInstsBit, Feature_isGFX8PlusBit, },
  {Feature_NotLDSRequiresM0InitBit, Feature_isGFX6GFX7GFX8GFX9GFX10Bit, },
  {Feature_NotLDSRequiresM0InitBit, Feature_isGFX7PlusBit, },
  {Feature_UseFakeTrue16InstsBit, Feature_isGFX8PlusBit, },
  {Feature_has16BankLDSBit, Feature_isNotGFX90APlusBit, },
  {Feature_has32BankLDSBit, Feature_isNotGFX90APlusBit, },
  {Feature_isGFX11OnlyBit, Feature_isWave32Bit, },
  {Feature_isGFX11OnlyBit, Feature_isWave64Bit, },
  {Feature_isGFX12PlusBit, Feature_isWave32Bit, },
  {Feature_isGFX12PlusBit, Feature_isWave64Bit, },
  {Feature_DisableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasUnrestrictedSOffsetBit, },
  {Feature_EnableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasFlatScratchInstsBit, },
  {Feature_EnableFlatScratchBit, Feature_HasFlatScratchInstsBit, Feature_HasFlatScratchSVSModeBit, },
  {Feature_FP64DenormalsBit, Feature_NotHasMinMaxDenormModesBit, Feature_isGFX12PlusBit, },
  {Feature_FP64DenormalsBit, Feature_NotHasMinMaxDenormModesBit, Feature_isNotGFX12PlusBit, },
  {Feature_Has16BitInstsBit, Feature_HasMinMaxDenormModesBit, Feature_HasTrue16BitInstsBit, },
  {Feature_Has16BitInstsBit, Feature_HasMinMaxDenormModesBit, Feature_NotHasTrue16BitInstsBit, },
  {Feature_Has16BitInstsBit, Feature_has32BankLDSBit, Feature_isNotGFX90APlusBit, },
  {Feature_HasMadMacF32InstsBit, Feature_NoFP32DenormalsBit, Feature_isGFX6GFX7GFX10Bit, },
  {Feature_EnableFlatScratchBit, Feature_HasD16LoadStoreBit, Feature_HasFlatScratchInstsBit, Feature_HasFlatScratchSVSModeBit, },
  {Feature_FP16DenormalsBit, Feature_Has16BitInstsBit, Feature_HasTrue16BitInstsBit, Feature_NotHasMinMaxDenormModesBit, },
  {Feature_FP16DenormalsBit, Feature_Has16BitInstsBit, Feature_NotHasMinMaxDenormModesBit, Feature_NotHasTrue16BitInstsBit, },
};

// ComplexPattern predicates.
enum {
  GICP_Invalid,
  GICP_gi_buf_soffset,
  GICP_gi_ds_128bit_8byte_aligned,
  GICP_gi_ds_1addr_1offset,
  GICP_gi_ds_64bit_4byte_aligned,
  GICP_gi_flat_offset,
  GICP_gi_flat_scratch_offset,
  GICP_gi_flat_scratch_saddr,
  GICP_gi_flat_scratch_svaddr,
  GICP_gi_global_offset,
  GICP_gi_global_saddr,
  GICP_gi_mubuf_addr64,
  GICP_gi_mubuf_offset,
  GICP_gi_mubuf_scratch_offen,
  GICP_gi_mubuf_scratch_offset,
  GICP_gi_smrd_buffer_imm,
  GICP_gi_smrd_buffer_imm32,
  GICP_gi_smrd_buffer_sgpr_imm,
  GICP_gi_smrd_imm,
  GICP_gi_smrd_imm32,
  GICP_gi_smrd_sgpr,
  GICP_gi_smrd_sgpr_imm,
  GICP_gi_swmmacindex16,
  GICP_gi_swmmacindex8,
  GICP_gi_vcsrc,
  GICP_gi_vinterpmods,
  GICP_gi_vinterpmods_hi,
  GICP_gi_vop3_mad_mix_mods,
  GICP_gi_vop3_mad_mix_mods_ext,
  GICP_gi_vop3_no_mods,
  GICP_gi_vop3mods,
  GICP_gi_vop3mods0,
  GICP_gi_vop3modsnoncanonicalizing,
  GICP_gi_vop3omods,
  GICP_gi_vop3opsel,
  GICP_gi_vop3opselmods,
  GICP_gi_vop3pmods,
  GICP_gi_vop3pmodsdot,
  GICP_gi_vop3pmodsneg,
  GICP_gi_vsrc0,
  GICP_gi_wmmamods,
  GICP_gi_wmmamodsf16Neg,
  GICP_gi_wmmamodsf16NegAbs,
  GICP_gi_wmmaopselvop3pmods,
  GICP_gi_wmmavisrc,
};
// See constructor for table contents
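// The table below, indexed by the GICP_* values above, provides the
// ComplexPattern matchers (addressing modes and source-modifier bundles).
// Each returns renderer callbacks for the matched operand, or std::nullopt if
// the pattern does not apply.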

AMDGPUInstructionSelector::ComplexMatcherMemFn
AMDGPUInstructionSelector::ComplexPredicateFns[] = {
  nullptr, // GICP_Invalid
  &AMDGPUInstructionSelector::selectBUFSOffset, // gi_buf_soffset
  &AMDGPUInstructionSelector::selectDS128Bit8ByteAligned, // gi_ds_128bit_8byte_aligned
  &AMDGPUInstructionSelector::selectDS1Addr1Offset, // gi_ds_1addr_1offset
  &AMDGPUInstructionSelector::selectDS64Bit4ByteAligned, // gi_ds_64bit_4byte_aligned
  &AMDGPUInstructionSelector::selectFlatOffset, // gi_flat_offset
  &AMDGPUInstructionSelector::selectScratchOffset, // gi_flat_scratch_offset
  &AMDGPUInstructionSelector::selectScratchSAddr, // gi_flat_scratch_saddr
  &AMDGPUInstructionSelector::selectScratchSVAddr, // gi_flat_scratch_svaddr
  &AMDGPUInstructionSelector::selectGlobalOffset, // gi_global_offset
  &AMDGPUInstructionSelector::selectGlobalSAddr, // gi_global_saddr
  &AMDGPUInstructionSelector::selectMUBUFAddr64, // gi_mubuf_addr64
  &AMDGPUInstructionSelector::selectMUBUFOffset, // gi_mubuf_offset
  &AMDGPUInstructionSelector::selectMUBUFScratchOffen, // gi_mubuf_scratch_offen
  &AMDGPUInstructionSelector::selectMUBUFScratchOffset, // gi_mubuf_scratch_offset
  &AMDGPUInstructionSelector::selectSMRDBufferImm, // gi_smrd_buffer_imm
  &AMDGPUInstructionSelector::selectSMRDBufferImm32, // gi_smrd_buffer_imm32
  &AMDGPUInstructionSelector::selectSMRDBufferSgprImm, // gi_smrd_buffer_sgpr_imm
  &AMDGPUInstructionSelector::selectSmrdImm, // gi_smrd_imm
  &AMDGPUInstructionSelector::selectSmrdImm32, // gi_smrd_imm32
  &AMDGPUInstructionSelector::selectSmrdSgpr, // gi_smrd_sgpr
  &AMDGPUInstructionSelector::selectSmrdSgprImm, // gi_smrd_sgpr_imm
  &AMDGPUInstructionSelector::selectSWMMACIndex16, // gi_swmmacindex16
  &AMDGPUInstructionSelector::selectSWMMACIndex8, // gi_swmmacindex8
  &AMDGPUInstructionSelector::selectVCSRC, // gi_vcsrc
  &AMDGPUInstructionSelector::selectVINTERPMods, // gi_vinterpmods
  &AMDGPUInstructionSelector::selectVINTERPModsHi, // gi_vinterpmods_hi
  &AMDGPUInstructionSelector::selectVOP3PMadMixMods, // gi_vop3_mad_mix_mods
  &AMDGPUInstructionSelector::selectVOP3PMadMixModsExt, // gi_vop3_mad_mix_mods_ext
  &AMDGPUInstructionSelector::selectVOP3NoMods, // gi_vop3_no_mods
  &AMDGPUInstructionSelector::selectVOP3Mods, // gi_vop3mods
  &AMDGPUInstructionSelector::selectVOP3Mods0, // gi_vop3mods0
  &AMDGPUInstructionSelector::selectVOP3ModsNonCanonicalizing, // gi_vop3modsnoncanonicalizing
  &AMDGPUInstructionSelector::selectVOP3OMods, // gi_vop3omods
  &AMDGPUInstructionSelector::selectVOP3OpSelMods, // gi_vop3opsel
  &AMDGPUInstructionSelector::selectVOP3OpSelMods, // gi_vop3opselmods
  &AMDGPUInstructionSelector::selectVOP3PMods, // gi_vop3pmods
  &AMDGPUInstructionSelector::selectVOP3PModsDOT, // gi_vop3pmodsdot
  &AMDGPUInstructionSelector::selectVOP3PModsNeg, // gi_vop3pmodsneg
  &AMDGPUInstructionSelector::selectVSRC0, // gi_vsrc0
  &AMDGPUInstructionSelector::selectWMMAModsF32NegAbs, // gi_wmmamods
  &AMDGPUInstructionSelector::selectWMMAModsF16Neg, // gi_wmmamodsf16Neg
  &AMDGPUInstructionSelector::selectWMMAModsF16NegAbs, // gi_wmmamodsf16NegAbs
  &AMDGPUInstructionSelector::selectWMMAOpSelVOP3PMods, // gi_wmmaopselvop3pmods
  &AMDGPUInstructionSelector::selectWMMAVISrc, // gi_wmmavisrc
};

// PatFrag predicates.
enum {
  GICXXPred_MI_Predicate_aligned_smrd_load = GICXXPred_Invalid + 1,
  GICXXPred_MI_Predicate_anonymous_18616,
  GICXXPred_MI_Predicate_anonymous_18619,
  GICXXPred_MI_Predicate_anonymous_18620,
  GICXXPred_MI_Predicate_anonymous_18621,
  GICXXPred_MI_Predicate_anonymous_18622,
  GICXXPred_MI_Predicate_anonymous_18623,
  GICXXPred_MI_Predicate_anonymous_18626,
  GICXXPred_MI_Predicate_anonymous_18627,
  GICXXPred_MI_Predicate_anonymous_18628,
  GICXXPred_MI_Predicate_anonymous_18629,
  GICXXPred_MI_Predicate_anonymous_18630,
  GICXXPred_MI_Predicate_anonymous_18631,
  GICXXPred_MI_Predicate_anonymous_18632,
  GICXXPred_MI_Predicate_anonymous_18633,
  GICXXPred_MI_Predicate_anonymous_18634,
  GICXXPred_MI_Predicate_anonymous_18635,
  GICXXPred_MI_Predicate_anonymous_18636,
  GICXXPred_MI_Predicate_anonymous_18637,
  GICXXPred_MI_Predicate_anonymous_18638,
  GICXXPred_MI_Predicate_anonymous_18639,
  GICXXPred_MI_Predicate_anonymous_18640,
  GICXXPred_MI_Predicate_anonymous_18641,
  GICXXPred_MI_Predicate_anonymous_18642,
  GICXXPred_MI_Predicate_anonymous_18643,
  GICXXPred_MI_Predicate_anonymous_18644,
  GICXXPred_MI_Predicate_anonymous_18645,
  GICXXPred_MI_Predicate_anonymous_18646,
  GICXXPred_MI_Predicate_anonymous_18647,
  GICXXPred_MI_Predicate_anonymous_18648,
  GICXXPred_MI_Predicate_anonymous_18649,
  GICXXPred_MI_Predicate_anonymous_18650,
  GICXXPred_MI_Predicate_anonymous_18651,
  GICXXPred_MI_Predicate_anonymous_18652,
  GICXXPred_MI_Predicate_anonymous_18653,
  GICXXPred_MI_Predicate_anonymous_18654,
  GICXXPred_MI_Predicate_anonymous_18655,
  GICXXPred_MI_Predicate_anonymous_18656,
  GICXXPred_MI_Predicate_anonymous_18657,
  GICXXPred_MI_Predicate_anonymous_18658,
  GICXXPred_MI_Predicate_anonymous_18659,
  GICXXPred_MI_Predicate_anonymous_18660,
  GICXXPred_MI_Predicate_anonymous_18661,
  GICXXPred_MI_Predicate_anonymous_18662,
  GICXXPred_MI_Predicate_anonymous_18663,
  GICXXPred_MI_Predicate_anonymous_18664,
  GICXXPred_MI_Predicate_anonymous_18665,
  GICXXPred_MI_Predicate_anonymous_18666,
  GICXXPred_MI_Predicate_anonymous_18667,
  GICXXPred_MI_Predicate_anonymous_18674,
  GICXXPred_MI_Predicate_anonymous_18686,
  GICXXPred_MI_Predicate_anonymous_22878,
  GICXXPred_MI_Predicate_anonymous_23712,
  GICXXPred_MI_Predicate_anonymous_23714,
  GICXXPred_MI_Predicate_anonymous_23996,
  GICXXPred_MI_Predicate_anonymous_23998,
  GICXXPred_MI_Predicate_anonymous_24000,
  GICXXPred_MI_Predicate_anonymous_24002,
  GICXXPred_MI_Predicate_anonymous_24004,
  GICXXPred_MI_Predicate_anonymous_24008,
  GICXXPred_MI_Predicate_anonymous_24010,
  GICXXPred_MI_Predicate_anonymous_24012,
  GICXXPred_MI_Predicate_anonymous_24014,
  GICXXPred_MI_Predicate_anonymous_24016,
  GICXXPred_MI_Predicate_anonymous_24264,
  GICXXPred_MI_Predicate_anonymous_24266,
  GICXXPred_MI_Predicate_anonymous_24493,
  GICXXPred_MI_Predicate_anonymous_24503,
  GICXXPred_MI_Predicate_anonymous_24506,
  GICXXPred_MI_Predicate_anonymous_24594,
  GICXXPred_MI_Predicate_anonymous_24875,
  GICXXPred_MI_Predicate_anonymous_24878,
  GICXXPred_MI_Predicate_anonymous_24881,
  GICXXPred_MI_Predicate_anonymous_24883,
  GICXXPred_MI_Predicate_anonymous_24886,
  GICXXPred_MI_Predicate_anonymous_24888,
  GICXXPred_MI_Predicate_anonymous_24891,
  GICXXPred_MI_Predicate_anonymous_24894,
  GICXXPred_MI_Predicate_anonymous_24898,
  GICXXPred_MI_Predicate_anonymous_24906,
  GICXXPred_MI_Predicate_anonymous_24940,
  GICXXPred_MI_Predicate_anonymous_25048,
  GICXXPred_MI_Predicate_anonymous_25051,
  GICXXPred_MI_Predicate_anonymous_25053,
  GICXXPred_MI_Predicate_anonymous_25055,
  GICXXPred_MI_Predicate_anonymous_25057,
  GICXXPred_MI_Predicate_anonymous_25059,
  GICXXPred_MI_Predicate_anonymous_25061,
  GICXXPred_MI_Predicate_anonymous_25975,
  GICXXPred_MI_Predicate_anonymous_25977,
  GICXXPred_MI_Predicate_anonymous_25981,
  GICXXPred_MI_Predicate_anonymous_25983,
  GICXXPred_MI_Predicate_anonymous_25985,
  GICXXPred_MI_Predicate_anonymous_25987,
  GICXXPred_MI_Predicate_anonymous_25991,
  GICXXPred_MI_Predicate_anonymous_25993,
  GICXXPred_MI_Predicate_anonymous_25995,
  GICXXPred_MI_Predicate_anonymous_25997,
  GICXXPred_MI_Predicate_anonymous_25999,
  GICXXPred_MI_Predicate_anonymous_26001,
  GICXXPred_MI_Predicate_anonymous_26003,
  GICXXPred_MI_Predicate_anonymous_26005,
  GICXXPred_MI_Predicate_anonymous_26009,
  GICXXPred_MI_Predicate_anonymous_26011,
  GICXXPred_MI_Predicate_anonymous_26013,
  GICXXPred_MI_Predicate_anonymous_26015,
  GICXXPred_MI_Predicate_anonymous_26019,
  GICXXPred_MI_Predicate_anonymous_26021,
  GICXXPred_MI_Predicate_anonymous_26025,
  GICXXPred_MI_Predicate_anonymous_26027,
  GICXXPred_MI_Predicate_anonymous_26029,
  GICXXPred_MI_Predicate_anonymous_26031,
  GICXXPred_MI_Predicate_anonymous_26035,
  GICXXPred_MI_Predicate_anonymous_26037,
  GICXXPred_MI_Predicate_anonymous_26039,
  GICXXPred_MI_Predicate_anonymous_26041,
  GICXXPred_MI_Predicate_anonymous_26043,
  GICXXPred_MI_Predicate_anonymous_26045,
  GICXXPred_MI_Predicate_anonymous_26047,
  GICXXPred_MI_Predicate_anonymous_26049,
  GICXXPred_MI_Predicate_anonymous_26053,
  GICXXPred_MI_Predicate_anonymous_26055,
  GICXXPred_MI_Predicate_anonymous_26057,
  GICXXPred_MI_Predicate_anonymous_26059,
  GICXXPred_MI_Predicate_anonymous_26063,
  GICXXPred_MI_Predicate_anonymous_26065,
  GICXXPred_MI_Predicate_anonymous_26067,
  GICXXPred_MI_Predicate_anonymous_26069,
  GICXXPred_MI_Predicate_anonymous_26073,
  GICXXPred_MI_Predicate_anonymous_26075,
  GICXXPred_MI_Predicate_anonymous_26079,
  GICXXPred_MI_Predicate_anonymous_26081,
  GICXXPred_MI_Predicate_anonymous_26083,
  GICXXPred_MI_Predicate_anonymous_26085,
  GICXXPred_MI_Predicate_anonymous_26087,
  GICXXPred_MI_Predicate_anonymous_26089,
  GICXXPred_MI_Predicate_anonymous_26091,
  GICXXPred_MI_Predicate_anonymous_26093,
  GICXXPred_MI_Predicate_anonymous_26097,
  GICXXPred_MI_Predicate_anonymous_26099,
  GICXXPred_MI_Predicate_anonymous_26101,
  GICXXPred_MI_Predicate_anonymous_26103,
  GICXXPred_MI_Predicate_anonymous_26107,
  GICXXPred_MI_Predicate_anonymous_26109,
  GICXXPred_MI_Predicate_anonymous_26111,
  GICXXPred_MI_Predicate_anonymous_26113,
  GICXXPred_MI_Predicate_anonymous_26115,
  GICXXPred_MI_Predicate_anonymous_26117,
  GICXXPred_MI_Predicate_anonymous_26121,
  GICXXPred_MI_Predicate_anonymous_26123,
  GICXXPred_MI_Predicate_anonymous_26125,
  GICXXPred_MI_Predicate_anonymous_26127,
  GICXXPred_MI_Predicate_anonymous_26129,
  GICXXPred_MI_Predicate_anonymous_26131,
  GICXXPred_MI_Predicate_anonymous_26133,
  GICXXPred_MI_Predicate_anonymous_26135,
  GICXXPred_MI_Predicate_anonymous_26137,
  GICXXPred_MI_Predicate_anonymous_26139,
  GICXXPred_MI_Predicate_anonymous_26143,
  GICXXPred_MI_Predicate_anonymous_26145,
  GICXXPred_MI_Predicate_anonymous_26147,
  GICXXPred_MI_Predicate_anonymous_26149,
  GICXXPred_MI_Predicate_anonymous_26151,
  GICXXPred_MI_Predicate_anonymous_26153,
  GICXXPred_MI_Predicate_anonymous_26155,
  GICXXPred_MI_Predicate_anonymous_26157,
  GICXXPred_MI_Predicate_anonymous_36507,
  GICXXPred_MI_Predicate_anonymous_36814,
  GICXXPred_MI_Predicate_anonymous_36816,
  GICXXPred_MI_Predicate_anonymous_36862,
  GICXXPred_MI_Predicate_anonymous_36875,
  GICXXPred_MI_Predicate_anonymous_36876,
  GICXXPred_MI_Predicate_anonymous_36879,
  GICXXPred_MI_Predicate_anonymous_36882,
  GICXXPred_MI_Predicate_anonymous_36885,
  GICXXPred_MI_Predicate_anonymous_36891,
  GICXXPred_MI_Predicate_anonymous_36894,
  GICXXPred_MI_Predicate_anonymous_36896,
  GICXXPred_MI_Predicate_anonymous_36903,
  GICXXPred_MI_Predicate_anonymous_36906,
  GICXXPred_MI_Predicate_anonymous_36908,
  GICXXPred_MI_Predicate_anonymous_36916,
  GICXXPred_MI_Predicate_anonymous_36921,
  GICXXPred_MI_Predicate_anonymous_36925,
  GICXXPred_MI_Predicate_anonymous_36929,
  GICXXPred_MI_Predicate_anonymous_37075,
  GICXXPred_MI_Predicate_anonymous_37078,
  GICXXPred_MI_Predicate_anonymous_37083,
  GICXXPred_MI_Predicate_anonymous_37087,
  GICXXPred_MI_Predicate_anonymous_37133,
  GICXXPred_MI_Predicate_anonymous_37135,
  GICXXPred_MI_Predicate_anonymous_37174,
  GICXXPred_MI_Predicate_anonymous_37197,
  GICXXPred_MI_Predicate_anonymous_37199,
  GICXXPred_MI_Predicate_anonymous_37207,
  GICXXPred_MI_Predicate_anonymous_37209,
  GICXXPred_MI_Predicate_anonymous_37226,
  GICXXPred_MI_Predicate_anonymous_37228,
  GICXXPred_MI_Predicate_anonymous_37230,
  GICXXPred_MI_Predicate_anonymous_37232,
  GICXXPred_MI_Predicate_anonymous_37234,
  GICXXPred_MI_Predicate_csh_mask_16,
  GICXXPred_MI_Predicate_csh_mask_32,
  GICXXPred_MI_Predicate_csh_mask_64,
  GICXXPred_MI_Predicate_fmaxnum_like_nnan,
  GICXXPred_MI_Predicate_fminnum_like_nnan,
  GICXXPred_MI_Predicate_is_canonicalized,
  GICXXPred_MI_Predicate_load_align_less_than_4_local,
  GICXXPred_MI_Predicate_load_align_less_than_4_local_m0,
  GICXXPred_MI_Predicate_shl1_add,
  GICXXPred_MI_Predicate_shl2_add,
  GICXXPred_MI_Predicate_shl3_add,
  GICXXPred_MI_Predicate_shl4_add,
  GICXXPred_MI_Predicate_shl_0_to_4,
  GICXXPred_MI_Predicate_smrd_extloadi8,
  GICXXPred_MI_Predicate_smrd_extloadi16,
  GICXXPred_MI_Predicate_smrd_load,
  GICXXPred_MI_Predicate_smrd_prefetch,
  GICXXPred_MI_Predicate_smrd_sextloadi8,
  GICXXPred_MI_Predicate_smrd_sextloadi16,
  GICXXPred_MI_Predicate_smrd_zextloadi8,
  GICXXPred_MI_Predicate_smrd_zextloadi16,
  GICXXPred_MI_Predicate_store_align_less_than_4_local,
  GICXXPred_MI_Predicate_store_align_less_than_4_local_m0,
};
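// testMIPredicate_MI dispatches PatFrag machine-instruction predicates by ID.
// Many anonymous_* predicates reduce to 'return true' because their real
// checks (address space, memory size, atomic ordering, ...) are encoded as
// separate opcodes in the match table; only predicates with custom C++ bodies
// do any work here.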
bool AMDGPUInstructionSelector::testMIPredicate_MI(unsigned PredicateID, const MachineInstr & MI, const MatcherState &State) const {
  const MachineFunction &MF = *MI.getParent()->getParent();
  const MachineRegisterInfo &MRI = MF.getRegInfo();
  const auto &Operands = State.RecordedOperands;
  (void)Operands;
  (void)MRI;
  switch (PredicateID) {
  case GICXXPred_MI_Predicate_aligned_smrd_load: {
    // A scalar (SMRD) load wider than a dword must be naturally aligned.
    auto &Ld = cast<GLoad>(MI);
    TypeSize Size = Ld.getMMO().getSize().getValue();
    return Size <= 4 || Ld.getMMO().getAlign().value() >= Size;
  }
  case GICXXPred_MI_Predicate_anonymous_18616: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18619: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18620: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18621: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18622: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18623: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18626: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18627: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18628: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18629: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18630: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18631: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18632: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18633: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18634: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18635: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18636: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18637: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18638: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18639: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18640: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18641: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18642: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18643: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18644: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18645: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18646: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18647: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18648: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18649: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18650: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18651: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18652: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18653: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18654: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18655: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18656: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18657: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18658: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18659: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18660: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18661: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18662: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18663: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18664: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18665: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18666: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18667: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18674: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_18686: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_22878: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_23712: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_23714: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_23996: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_23998: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24000: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24002: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24004: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24008: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24010: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24012: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24014: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24016: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24264: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24266: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24493: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24503: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24506: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_24594: {
    return true;
  }
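  // The following cases share one pasted predicate body: they count how many
  // of the three recorded source operands live in SGPRs and reject the match
  // once that count exceeds the subtarget's constant-bus limit (queried via
  // V_ADD3_U32_e64 as a representative VOP3 opcode).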
  case GICXXPred_MI_Predicate_anonymous_24875: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24875 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24878: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24878 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24881: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24881 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24883: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24883 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24886: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24886 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24888: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24888 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24891: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24891 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24894: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24894 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24898: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24898 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24906: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24906 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_24940: {
    
        const int ConstantBusLimit = Subtarget->getConstantBusLimit(AMDGPU::V_ADD3_U32_e64);
        int ConstantBusUses = 0;
        for (unsigned i = 0; i < 3; ++i) {
          const RegisterBank *RegBank = RBI.getRegBank(Operands[i]->getReg(), MRI, TRI);
          if (RegBank->getID() == AMDGPU::SGPRRegBankID) {
            if (++ConstantBusUses > ConstantBusLimit)
              return false;
          }
        }
        return true;
      
    llvm_unreachable("anonymous_24940 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_25048: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25051: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25053: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25055: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25057: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25059: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_25061: {
    return true;
  }
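  // The predicates below come in complementary pairs and select between AGPR
  // and non-AGPR variants of the same pattern, depending on whether
  // SIMachineFunctionInfo reports that the function may need AGPRs.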
  case GICXXPred_MI_Predicate_anonymous_25975: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25977: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25981: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25983: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25985: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25987: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25991: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25993: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25995: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25997: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_25999: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26001: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26003: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26005: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26009: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26011: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26013: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26015: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26019: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26021: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26025: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26027: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26029: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26031: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26035: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26037: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26039: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26041: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26043: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26045: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26047: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26049: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26053: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26055: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26057: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26059: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26063: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26065: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26067: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26069: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26073: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26075: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26079: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26081: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26083: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26085: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26087: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26089: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26091: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26093: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26097: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26099: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26101: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26103: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26107: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26109: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26111: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26113: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26115: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26117: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26121: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26123: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26125: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26127: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26129: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26131: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26133: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26135: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26137: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26139: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26143: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26145: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26147: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26149: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26151: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26153: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26155: {
    
      return MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_26157: {
    
      return !MF.getInfo<SIMachineFunctionInfo>()->mayNeedAGPRs();
    
  }
  case GICXXPred_MI_Predicate_anonymous_36507: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36814: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36816: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36862: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36875: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36876: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36879: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36882: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36885: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36891: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36894: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36896: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36903: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36906: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36908: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36916: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36921: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36925: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_36929: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37075: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37078: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37083: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37087: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37133: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37135: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37174: {
    
        const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
          MF.getSubtarget().getTargetLowering());
    
        return TLI->isCanonicalized(MI.getOperand(1).getReg(), MF) &&
          TLI->isCanonicalized(MI.getOperand(2).getReg(), MF);
      
    llvm_unreachable("anonymous_37174 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_37197: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37199: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37207: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37209: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37226: {
    
        const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
          MF.getSubtarget().getTargetLowering());
    
        return TLI->isCanonicalized(MI.getOperand(1).getReg(), MF);
      
    llvm_unreachable("anonymous_37226 should have returned");
  }
  case GICXXPred_MI_Predicate_anonymous_37228: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37230: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37232: {
    return true;
  }
  case GICXXPred_MI_Predicate_anonymous_37234: {
    return true;
  }
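  // csh_mask_N: an AND mask applied to a shift amount is unneeded when it
  // already covers the low log2(bitwidth) bits (4 bits for 16-bit, 5 for
  // 32-bit, 6 for 64-bit shifts), as tested by isUnneededShiftMask().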
  case GICXXPred_MI_Predicate_csh_mask_16: {
     return isUnneededShiftMask(MI, 4); 
  }
  case GICXXPred_MI_Predicate_csh_mask_32: {
     return isUnneededShiftMask(MI, 5); 
  }
  case GICXXPred_MI_Predicate_csh_mask_64: {
     return isUnneededShiftMask(MI, 6); 
  }
  case GICXXPred_MI_Predicate_fmaxnum_like_nnan: {
    
        return isKnownNeverNaN(MI.getOperand(0).getReg(), MRI);
      
  }
  case GICXXPred_MI_Predicate_fminnum_like_nnan: {
    
        return isKnownNeverNaN(MI.getOperand(0).getReg(), MRI);
      
  }
  case GICXXPred_MI_Predicate_is_canonicalized: {
    
        const SITargetLowering *TLI = static_cast<const SITargetLowering *>(
            MF.getSubtarget().getTargetLowering());
        const MachineOperand &Dst = MI.getOperand(0);
        assert(Dst.isDef());
        return TLI->isCanonicalized(Dst.getReg(), MF);
       
    llvm_unreachable("is_canonicalized should have returned");
  }
  case GICXXPred_MI_Predicate_load_align_less_than_4_local: {
    return (*MI.memoperands_begin())->getAlign() < 4;
  }
  case GICXXPred_MI_Predicate_load_align_less_than_4_local_m0: {
    return (*MI.memoperands_begin())->getAlign() < 4;
  }
  case GICXXPred_MI_Predicate_shl1_add: {
    return true;
  }
  case GICXXPred_MI_Predicate_shl2_add: {
    return true;
  }
  case GICXXPred_MI_Predicate_shl3_add: {
    return true;
  }
  case GICXXPred_MI_Predicate_shl4_add: {
    return true;
  }
  case GICXXPred_MI_Predicate_shl_0_to_4: {
    
        int64_t Imm = 0;
        if (!mi_match(MI.getOperand(2).getReg(), MRI, m_ICst(Imm)) &&
            !mi_match(MI.getOperand(2).getReg(), MRI, m_Copy(m_ICst(Imm))))
          return false;
        return (uint64_t)Imm <= 4;
      
    llvm_unreachable("shl_0_to_4 should have returned");
  }
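  // The smrd_* predicates share one body: scalar-memory (SMEM) selection
  // requires the access to have a single memory operand, be uniform
  // (isInstrUniform), and use an address with no VGPR components
  // (hasVgprParts over the collected GEPInfo).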
  case GICXXPred_MI_Predicate_smrd_extloadi8: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_extloadi8 should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_extloadi16: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_extloadi16 should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_load: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_load should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_prefetch: {
    
        return isInstrUniform(MI);
      
  }
  case GICXXPred_MI_Predicate_smrd_sextloadi8: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_sextloadi8 should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_sextloadi16: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_sextloadi16 should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_zextloadi8: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_zextloadi8 should have returned");
  }
  case GICXXPred_MI_Predicate_smrd_zextloadi16: {
    
        if (!MI.hasOneMemOperand())
          return false;
        if (!isInstrUniform(MI))
          return false;
    
        // FIXME: We should probably be caching this.
        SmallVector<GEPInfo, 4> AddrInfo;
        getAddrModeInfo(MI, MRI, AddrInfo);
    
        if (hasVgprParts(AddrInfo))
          return false;
        return true;
      
    llvm_unreachable("smrd_zextloadi16 should have returned");
  }
  case GICXXPred_MI_Predicate_store_align_less_than_4_local: {
    return (*MI.memoperands_begin())->getAlign() < 4;
  }
  case GICXXPred_MI_Predicate_store_align_less_than_4_local_m0: {
    return (*MI.memoperands_begin())->getAlign() < 4;
  }
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
// PatFrag predicates.
enum {
  GICXXPred_I64_Predicate_IMMZeroBasedBitfieldMask = GICXXPred_Invalid + 1,
  GICXXPred_I64_Predicate_NegSubInlineConst32,
  GICXXPred_I64_Predicate_NegSubInlineIntConst16,
  GICXXPred_I64_Predicate_SIMM16bit,
  GICXXPred_I64_Predicate_ShiftAmt32Imm,
  GICXXPred_I64_Predicate_SupportedRoundMode,
  GICXXPred_I64_Predicate_i32imm_one,
  GICXXPred_I64_Predicate_i32imm_zero,
  GICXXPred_I64_Predicate_i64imm_32bit,
};
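// Evaluates the ImmLeaf-style PatFrag predicates above against an immediate
// already widened to int64_t; the match table refers to them by their
// GICXXPred_I64_* ID.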
bool AMDGPUInstructionSelector::testImmPredicate_I64(unsigned PredicateID, int64_t Imm) const {
  switch (PredicateID) {
  case GICXXPred_I64_Predicate_IMMZeroBasedBitfieldMask: {
    
      return isMask_32(Imm);
    
  }
  case GICXXPred_I64_Predicate_NegSubInlineConst32: {
    
      return Imm < -16 && Imm >= -64;
    
  }
  case GICXXPred_I64_Predicate_NegSubInlineIntConst16: {
    
      return Imm < -16 && Imm >= -64;
    
  }
  case GICXXPred_I64_Predicate_SIMM16bit: {
    return isInt<16>(Imm) || isUInt<16>(Imm);
  }
  case GICXXPred_I64_Predicate_ShiftAmt32Imm: {
    
      return Imm < 32;
    
  }
  case GICXXPred_I64_Predicate_SupportedRoundMode: {
    
      return Imm == (int)RoundingMode::TowardZero ||
             Imm == (int)RoundingMode::NearestTiesToEven ||
             Imm == (int)RoundingMode::TowardPositive ||
             Imm == (int)RoundingMode::TowardNegative;
    
  }
  case GICXXPred_I64_Predicate_i32imm_one: {
    
      return Imm == 1;
    
  }
  case GICXXPred_I64_Predicate_i32imm_zero: {
    
      return Imm == 0;
    
  }
  case GICXXPred_I64_Predicate_i64imm_32bit: {
    
      return (Imm & 0xffffffffULL) == static_cast<uint64_t>(Imm);
    
  }
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
// PatFrag predicates.
enum {
  GICXXPred_APFloat_Predicate_InlineImmFP32 = GICXXPred_Invalid + 1,
  GICXXPred_APFloat_Predicate_InlineImmFP64,
  GICXXPred_APFloat_Predicate_fpimm_neg_pow2_prefer_ldexp_f64,
  GICXXPred_APFloat_Predicate_fpimm_pos_pow2_prefer_ldexp_f64,
};
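// Floating-point immediate predicates: InlineImmFP32/InlineImmFP64 accept
// values encodable as hardware inline constants, while the
// fpimm_*_pow2_prefer_ldexp_f64 predicates accept exact powers of two outside
// the inline-immediate set, where an ldexp is preferred over materializing a
// 64-bit literal.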
bool AMDGPUInstructionSelector::testImmPredicate_APFloat(unsigned PredicateID, const APFloat & Imm) const {
  switch (PredicateID) {
  case GICXXPred_APFloat_Predicate_InlineImmFP32: {
    
      return isInlineImmediate(Imm);
    
  }
  case GICXXPred_APFloat_Predicate_InlineImmFP64: {
    
      return isInlineImmediate(Imm);
    
  }
  case GICXXPred_APFloat_Predicate_fpimm_neg_pow2_prefer_ldexp_f64: {
    
        if (!Imm.isNegative())
          return false;
        int Exp = Imm.getExactLog2Abs();
        // Prefer leaving the FP inline immediates as they are.
        // 0.5, 1.0, 2.0, 4.0
    
        // For f64 ldexp is always better than materializing a 64-bit
        // constant.
        return Exp != INT_MIN && (Exp < -1 || Exp > 2);
      
    llvm_unreachable("fpimm_neg_pow2_prefer_ldexp_f64 should have returned");
  }
  case GICXXPred_APFloat_Predicate_fpimm_pos_pow2_prefer_ldexp_f64: {
    
        if (Imm.isNegative())
          return false;
    
        int Exp = Imm.getExactLog2Abs();
        // Prefer leaving the FP inline immediates as they are.
        // 0.5, 1.0, 2.0, 4.0
    
        // For f64 ldexp is always better than materializing a 64-bit
        // constant.
        return Exp != INT_MIN && (Exp < -1 || Exp > 2);
      
    llvm_unreachable("fpimm_pos_pow2_prefer_ldexp_f64 should have returned");
  }
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
// PatFrag predicates.
enum {
  GICXXPred_APInt_Predicate_InlineImm64 = GICXXPred_Invalid + 1,
};
bool AMDGPUInstructionSelector::testImmPredicate_APInt(unsigned PredicateID, const APInt & Imm) const {
  switch (PredicateID) {
  case GICXXPred_APInt_Predicate_InlineImm64: {
    
      return isInlineImmediate(Imm);
    
  }
  }
  llvm_unreachable("Unknown predicate");
  return false;
}
bool AMDGPUInstructionSelector::testSimplePredicate(unsigned) const {
    llvm_unreachable("AMDGPUInstructionSelector does not support simple predicates!");
  return false;
}
// Custom renderers.
enum {
  GICR_Invalid,
  GICR_renderBitcastFPImm32,
  GICR_renderBitcastFPImm64,
  GICR_renderExtractCPol,
  GICR_renderExtractCpolSetGLC,
  GICR_renderExtractSWZ,
  GICR_renderFPPow2ToExponent,
  GICR_renderFrameIndex,
  GICR_renderNegateImm,
  GICR_renderOpSelTImm,
  GICR_renderPopcntImm,
  GICR_renderRoundMode,
  GICR_renderTruncTImm,
};
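// Indexed by the GICR_* IDs above; GIR_CustomRenderer (and related) match
// table entries invoke the member function stored at the corresponding slot.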
AMDGPUInstructionSelector::CustomRendererFn
AMDGPUInstructionSelector::CustomRenderers[] = {
  nullptr, // GICR_Invalid
  &AMDGPUInstructionSelector::renderBitcastFPImm32,
  &AMDGPUInstructionSelector::renderBitcastFPImm64,
  &AMDGPUInstructionSelector::renderExtractCPol,
  &AMDGPUInstructionSelector::renderExtractCpolSetGLC,
  &AMDGPUInstructionSelector::renderExtractSWZ,
  &AMDGPUInstructionSelector::renderFPPow2ToExponent,
  &AMDGPUInstructionSelector::renderFrameIndex,
  &AMDGPUInstructionSelector::renderNegateImm,
  &AMDGPUInstructionSelector::renderOpSelTImm,
  &AMDGPUInstructionSelector::renderPopcntImm,
  &AMDGPUInstructionSelector::renderRoundMode,
  &AMDGPUInstructionSelector::renderTruncTImm,
};

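// selectImpl() runs the byte-coded interpreter executeMatchTable() over
// MatchTable0 for the instruction being selected; State.MIs[0] is always the
// root instruction. A caller in the hand-written selector looks roughly like
// the following sketch (illustrative only, helper name assumed):
//
//   bool AMDGPUInstructionSelector::select(MachineInstr &I) {
//     // Try the TableGen-imported patterns first; fall back to manual
//     // selection when no rule in the match table applies.
//     if (selectImpl(I, *CoverageInfo))
//       return true;
//     return selectByHand(I); // hypothetical manual fallback
//   }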
bool AMDGPUInstructionSelector::selectImpl(MachineInstr &I, CodeGenCoverage &CoverageInfo) const {
  const PredicateBitset AvailableFeatures = getAvailableFeatures();
  MachineIRBuilder B(I);
  State.MIs.clear();
  State.MIs.push_back(&I);

  if (executeMatchTable(*this, State, ExecInfo, B, getMatchTable(), TII, MF->getRegInfo(), TRI, RBI, AvailableFeatures, &CoverageInfo)) {
    return true;
  }

  return false;
}

bool AMDGPUInstructionSelector::runCustomAction(unsigned, const MatcherState&, NewMIVector &) const {
    llvm_unreachable("AMDGPUInstructionSelector does not support custom C++ actions!");
}
// Split each multi-byte match-table value into a host-endian byte sequence.
#if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__
#define GIMT_Encode2(X) uint8_t(X), uint8_t((uint16_t)X >> 8)
#define GIMT_Encode4(X) uint8_t(X), uint8_t((uint32_t)X >> 8), uint8_t((uint32_t)X >> 16), uint8_t((uint32_t)X >> 24)
#define GIMT_Encode8(X) uint8_t(X), uint8_t((uint64_t)X >> 8), uint8_t((uint64_t)X >> 16), uint8_t((uint64_t)X >> 24), uint8_t((uint64_t)X >> 32), uint8_t((uint64_t)X >> 40), uint8_t((uint64_t)X >> 48), uint8_t((uint64_t)X >> 56)
#else
#define GIMT_Encode2(X) uint8_t((uint16_t)X >> 8), uint8_t(X)
#define GIMT_Encode4(X) uint8_t((uint32_t)X >> 24), uint8_t((uint32_t)X >> 16), uint8_t((uint32_t)X >> 8), uint8_t(X)
#define GIMT_Encode8(X) uint8_t((uint64_t)X >> 56), uint8_t((uint64_t)X >> 48), uint8_t((uint64_t)X >> 40), uint8_t((uint64_t)X >> 32), uint8_t((uint64_t)X >> 24), uint8_t((uint64_t)X >> 16), uint8_t((uint64_t)X >> 8), uint8_t(X)
#endif
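// MatchTable0 is the byte-encoded decision tree consumed by
// executeMatchTable(). It opens with a GIM_SwitchOpcode on MIs[0] over the
// generic-opcode range [53, 3660); each GIMT_Encode4 entry is the byte offset
// of the rules for that opcode, with 0 (and any opcode outside the range)
// falling through to the default label.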
const uint8_t *AMDGPUInstructionSelector::getMatchTable() const {
  constexpr static uint8_t MatchTable0[] = {
    GIM_SwitchOpcode, /*MI*/0, /*[*/GIMT_Encode2(53), GIMT_Encode2(3660), /*)*//*default:*//*Label 167*/ GIMT_Encode4(587105),
    /*TargetOpcode::G_ADD*//*Label 0*/ GIMT_Encode4(14438),
    /*TargetOpcode::G_SUB*//*Label 1*/ GIMT_Encode4(18072),
    /*TargetOpcode::G_MUL*//*Label 2*/ GIMT_Encode4(18573), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_AND*//*Label 3*/ GIMT_Encode4(18859),
    /*TargetOpcode::G_OR*//*Label 4*/ GIMT_Encode4(19871),
    /*TargetOpcode::G_XOR*//*Label 5*/ GIMT_Encode4(34395), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FRAME_INDEX*//*Label 6*/ GIMT_Encode4(38819), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_BUILD_VECTOR*//*Label 7*/ GIMT_Encode4(38871), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_BITCAST*//*Label 8*/ GIMT_Encode4(41906), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_INTRINSIC_FPTRUNC_ROUND*//*Label 9*/ GIMT_Encode4(50379),
    /*TargetOpcode::G_INTRINSIC_TRUNC*//*Label 10*/ GIMT_Encode4(50471), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_INTRINSIC_ROUNDEVEN*//*Label 11*/ GIMT_Encode4(50799),
    /*TargetOpcode::G_READCYCLECOUNTER*//*Label 12*/ GIMT_Encode4(51127),
    /*TargetOpcode::G_READSTEADYCOUNTER*//*Label 13*/ GIMT_Encode4(51267),
    /*TargetOpcode::G_LOAD*//*Label 14*/ GIMT_Encode4(51325),
    /*TargetOpcode::G_SEXTLOAD*//*Label 15*/ GIMT_Encode4(126714),
    /*TargetOpcode::G_ZEXTLOAD*//*Label 16*/ GIMT_Encode4(130680), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_STORE*//*Label 17*/ GIMT_Encode4(134522), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_ATOMIC_CMPXCHG*//*Label 18*/ GIMT_Encode4(170428),
    /*TargetOpcode::G_ATOMICRMW_XCHG*//*Label 19*/ GIMT_Encode4(171554),
    /*TargetOpcode::G_ATOMICRMW_ADD*//*Label 20*/ GIMT_Encode4(173766),
    /*TargetOpcode::G_ATOMICRMW_SUB*//*Label 21*/ GIMT_Encode4(176278),
    /*TargetOpcode::G_ATOMICRMW_AND*//*Label 22*/ GIMT_Encode4(178790), GIMT_Encode4(0),
    /*TargetOpcode::G_ATOMICRMW_OR*//*Label 23*/ GIMT_Encode4(181302),
    /*TargetOpcode::G_ATOMICRMW_XOR*//*Label 24*/ GIMT_Encode4(183814),
    /*TargetOpcode::G_ATOMICRMW_MAX*//*Label 25*/ GIMT_Encode4(186326),
    /*TargetOpcode::G_ATOMICRMW_MIN*//*Label 26*/ GIMT_Encode4(188838),
    /*TargetOpcode::G_ATOMICRMW_UMAX*//*Label 27*/ GIMT_Encode4(191350),
    /*TargetOpcode::G_ATOMICRMW_UMIN*//*Label 28*/ GIMT_Encode4(193862),
    /*TargetOpcode::G_ATOMICRMW_FADD*//*Label 29*/ GIMT_Encode4(196374), GIMT_Encode4(0),
    /*TargetOpcode::G_ATOMICRMW_FMAX*//*Label 30*/ GIMT_Encode4(199233),
    /*TargetOpcode::G_ATOMICRMW_FMIN*//*Label 31*/ GIMT_Encode4(201879),
    /*TargetOpcode::G_ATOMICRMW_UINC_WRAP*//*Label 32*/ GIMT_Encode4(204525),
    /*TargetOpcode::G_ATOMICRMW_UDEC_WRAP*//*Label 33*/ GIMT_Encode4(207037), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FENCE*//*Label 34*/ GIMT_Encode4(209549),
    /*TargetOpcode::G_PREFETCH*//*Label 35*/ GIMT_Encode4(209568), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_INTRINSIC*//*Label 36*/ GIMT_Encode4(210004),
    /*TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS*//*Label 37*/ GIMT_Encode4(223248),
    /*TargetOpcode::G_INTRINSIC_CONVERGENT*//*Label 38*/ GIMT_Encode4(229370),
    /*TargetOpcode::G_INTRINSIC_CONVERGENT_W_SIDE_EFFECTS*//*Label 39*/ GIMT_Encode4(246429),
    /*TargetOpcode::G_ANYEXT*//*Label 40*/ GIMT_Encode4(247668),
    /*TargetOpcode::G_TRUNC*//*Label 41*/ GIMT_Encode4(248231),
    /*TargetOpcode::G_CONSTANT*//*Label 42*/ GIMT_Encode4(248356),
    /*TargetOpcode::G_FCONSTANT*//*Label 43*/ GIMT_Encode4(249082), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_SEXT*//*Label 44*/ GIMT_Encode4(249460), GIMT_Encode4(0),
    /*TargetOpcode::G_ZEXT*//*Label 45*/ GIMT_Encode4(250251),
    /*TargetOpcode::G_SHL*//*Label 46*/ GIMT_Encode4(251839),
    /*TargetOpcode::G_LSHR*//*Label 47*/ GIMT_Encode4(253029),
    /*TargetOpcode::G_ASHR*//*Label 48*/ GIMT_Encode4(253981), GIMT_Encode4(0),
    /*TargetOpcode::G_FSHR*//*Label 49*/ GIMT_Encode4(254933),
    /*TargetOpcode::G_ROTR*//*Label 50*/ GIMT_Encode4(254978), GIMT_Encode4(0),
    /*TargetOpcode::G_ICMP*//*Label 51*/ GIMT_Encode4(255010),
    /*TargetOpcode::G_FCMP*//*Label 52*/ GIMT_Encode4(256272), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_SELECT*//*Label 53*/ GIMT_Encode4(260393), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_UMULH*//*Label 54*/ GIMT_Encode4(260597),
    /*TargetOpcode::G_SMULH*//*Label 55*/ GIMT_Encode4(260706),
    /*TargetOpcode::G_UADDSAT*//*Label 56*/ GIMT_Encode4(260815),
    /*TargetOpcode::G_SADDSAT*//*Label 57*/ GIMT_Encode4(261081),
    /*TargetOpcode::G_USUBSAT*//*Label 58*/ GIMT_Encode4(261283),
    /*TargetOpcode::G_SSUBSAT*//*Label 59*/ GIMT_Encode4(261549), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FADD*//*Label 60*/ GIMT_Encode4(261751),
    /*TargetOpcode::G_FSUB*//*Label 61*/ GIMT_Encode4(263109),
    /*TargetOpcode::G_FMUL*//*Label 62*/ GIMT_Encode4(263473),
    /*TargetOpcode::G_FMA*//*Label 63*/ GIMT_Encode4(264859),
    /*TargetOpcode::G_FMAD*//*Label 64*/ GIMT_Encode4(266500), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FPOW*//*Label 65*/ GIMT_Encode4(267446), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FEXP2*//*Label 66*/ GIMT_Encode4(267564), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FLOG2*//*Label 67*/ GIMT_Encode4(267787), GIMT_Encode4(0),
    /*TargetOpcode::G_FLDEXP*//*Label 68*/ GIMT_Encode4(268010), GIMT_Encode4(0),
    /*TargetOpcode::G_FNEG*//*Label 69*/ GIMT_Encode4(268373),
    /*TargetOpcode::G_FPEXT*//*Label 70*/ GIMT_Encode4(270706),
    /*TargetOpcode::G_FPTRUNC*//*Label 71*/ GIMT_Encode4(271051),
    /*TargetOpcode::G_FPTOSI*//*Label 72*/ GIMT_Encode4(271734),
    /*TargetOpcode::G_FPTOUI*//*Label 73*/ GIMT_Encode4(272298),
    /*TargetOpcode::G_SITOFP*//*Label 74*/ GIMT_Encode4(272862),
    /*TargetOpcode::G_UITOFP*//*Label 75*/ GIMT_Encode4(273409), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FABS*//*Label 76*/ GIMT_Encode4(273956),
    /*TargetOpcode::G_FCOPYSIGN*//*Label 77*/ GIMT_Encode4(274958),
    /*TargetOpcode::G_IS_FPCLASS*//*Label 78*/ GIMT_Encode4(275515),
    /*TargetOpcode::G_FCANONICALIZE*//*Label 79*/ GIMT_Encode4(275784),
    /*TargetOpcode::G_FMINNUM*//*Label 80*/ GIMT_Encode4(276984),
    /*TargetOpcode::G_FMAXNUM*//*Label 81*/ GIMT_Encode4(313323),
    /*TargetOpcode::G_FMINNUM_IEEE*//*Label 82*/ GIMT_Encode4(349662),
    /*TargetOpcode::G_FMAXNUM_IEEE*//*Label 83*/ GIMT_Encode4(386001),
    /*TargetOpcode::G_FMINIMUM*//*Label 84*/ GIMT_Encode4(422340),
    /*TargetOpcode::G_FMAXIMUM*//*Label 85*/ GIMT_Encode4(423810), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_GET_FPMODE*//*Label 86*/ GIMT_Encode4(425280), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_PTR_ADD*//*Label 87*/ GIMT_Encode4(425404), GIMT_Encode4(0),
    /*TargetOpcode::G_SMIN*//*Label 88*/ GIMT_Encode4(425481),
    /*TargetOpcode::G_SMAX*//*Label 89*/ GIMT_Encode4(428585),
    /*TargetOpcode::G_UMIN*//*Label 90*/ GIMT_Encode4(431851),
    /*TargetOpcode::G_UMAX*//*Label 91*/ GIMT_Encode4(434955),
    /*TargetOpcode::G_ABS*//*Label 92*/ GIMT_Encode4(438059), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_BR*//*Label 93*/ GIMT_Encode4(438196), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_CTTZ_ZERO_UNDEF*//*Label 94*/ GIMT_Encode4(438230), GIMT_Encode4(0),
    /*TargetOpcode::G_CTLZ_ZERO_UNDEF*//*Label 95*/ GIMT_Encode4(438329),
    /*TargetOpcode::G_CTPOP*//*Label 96*/ GIMT_Encode4(438431),
    /*TargetOpcode::G_BSWAP*//*Label 97*/ GIMT_Encode4(438643),
    /*TargetOpcode::G_BITREVERSE*//*Label 98*/ GIMT_Encode4(439590),
    /*TargetOpcode::G_FCEIL*//*Label 99*/ GIMT_Encode4(439838), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_FSQRT*//*Label 100*/ GIMT_Encode4(440166),
    /*TargetOpcode::G_FFLOOR*//*Label 101*/ GIMT_Encode4(440458), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_STRICT_FADD*//*Label 102*/ GIMT_Encode4(440972),
    /*TargetOpcode::G_STRICT_FSUB*//*Label 103*/ GIMT_Encode4(441934),
    /*TargetOpcode::G_STRICT_FMUL*//*Label 104*/ GIMT_Encode4(442298), GIMT_Encode4(0), GIMT_Encode4(0),
    /*TargetOpcode::G_STRICT_FMA*//*Label 105*/ GIMT_Encode4(443566), GIMT_Encode4(0),
    /*TargetOpcode::G_STRICT_FLDEXP*//*Label 106*/ GIMT_Encode4(444454), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), 
GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*AMDGPU::G_AMDGPU_ATOMIC_CMPXCHG*//*Label 107*/ GIMT_Encode4(444817),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_ADD*//*Label 108*/ GIMT_Encode4(447097),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_AND*//*Label 109*/ GIMT_Encode4(449481),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_CMPSWAP*//*Label 110*/ GIMT_Encode4(451865),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_COND_SUB_U32*//*Label 111*/ GIMT_Encode4(456159),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_DEC*//*Label 112*/ GIMT_Encode4(456776),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_FADD*//*Label 113*/ GIMT_Encode4(459160),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_FMAX*//*Label 114*/ GIMT_Encode4(463986),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_FMIN*//*Label 115*/ GIMT_Encode4(467602),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_INC*//*Label 116*/ GIMT_Encode4(471218),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_OR*//*Label 117*/ GIMT_Encode4(473602),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_SMAX*//*Label 118*/ GIMT_Encode4(475986),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_SMIN*//*Label 119*/ GIMT_Encode4(478370),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_SUB*//*Label 120*/ GIMT_Encode4(480754),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_SWAP*//*Label 121*/ GIMT_Encode4(483138),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_UMAX*//*Label 122*/ GIMT_Encode4(486682),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_UMIN*//*Label 123*/ GIMT_Encode4(489066),
    /*AMDGPU::G_AMDGPU_BUFFER_ATOMIC_XOR*//*Label 124*/ GIMT_Encode4(491450),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD*//*Label 125*/ GIMT_Encode4(493834),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_FORMAT*//*Label 126*/ GIMT_Encode4(509756),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_FORMAT_D16*//*Label 127*/ GIMT_Encode4(514664),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_FORMAT_TFE*//*Label 128*/ GIMT_Encode4(522409),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_SBYTE*//*Label 129*/ GIMT_Encode4(524929),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_SBYTE_TFE*//*Label 130*/ GIMT_Encode4(525535),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_SSHORT*//*Label 131*/ GIMT_Encode4(526141),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_SSHORT_TFE*//*Label 132*/ GIMT_Encode4(526747),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_TFE*//*Label 133*/ GIMT_Encode4(527353),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_UBYTE*//*Label 134*/ GIMT_Encode4(529873),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_UBYTE_TFE*//*Label 135*/ GIMT_Encode4(530479),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_USHORT*//*Label 136*/ GIMT_Encode4(531085),
    /*AMDGPU::G_AMDGPU_BUFFER_LOAD_USHORT_TFE*//*Label 137*/ GIMT_Encode4(531691),
    /*AMDGPU::G_AMDGPU_BUFFER_STORE*//*Label 138*/ GIMT_Encode4(532297),
    /*AMDGPU::G_AMDGPU_BUFFER_STORE_BYTE*//*Label 139*/ GIMT_Encode4(548163),
    /*AMDGPU::G_AMDGPU_BUFFER_STORE_FORMAT*//*Label 140*/ GIMT_Encode4(548765),
    /*AMDGPU::G_AMDGPU_BUFFER_STORE_FORMAT_D16*//*Label 141*/ GIMT_Encode4(554253),
    /*AMDGPU::G_AMDGPU_BUFFER_STORE_SHORT*//*Label 142*/ GIMT_Encode4(560518),
    /*AMDGPU::G_AMDGPU_CLAMP*//*Label 143*/ GIMT_Encode4(561120),
    /*AMDGPU::G_AMDGPU_CVT_F32_UBYTE0*//*Label 144*/ GIMT_Encode4(562059),
    /*AMDGPU::G_AMDGPU_CVT_F32_UBYTE1*//*Label 145*/ GIMT_Encode4(562104),
    /*AMDGPU::G_AMDGPU_CVT_F32_UBYTE2*//*Label 146*/ GIMT_Encode4(562149),
    /*AMDGPU::G_AMDGPU_CVT_F32_UBYTE3*//*Label 147*/ GIMT_Encode4(562194),
    /*AMDGPU::G_AMDGPU_CVT_PK_I16_I32*//*Label 148*/ GIMT_Encode4(562239),
    /*AMDGPU::G_AMDGPU_FFBH_U32*//*Label 149*/ GIMT_Encode4(562269),
    /*AMDGPU::G_AMDGPU_FFBL_B32*//*Label 150*/ GIMT_Encode4(562371),
    /*AMDGPU::G_AMDGPU_FMAX_LEGACY*//*Label 151*/ GIMT_Encode4(562470),
    /*AMDGPU::G_AMDGPU_FMED3*//*Label 152*/ GIMT_Encode4(562543),
    /*AMDGPU::G_AMDGPU_FMIN_LEGACY*//*Label 153*/ GIMT_Encode4(562739), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*AMDGPU::G_AMDGPU_RCP_IFLAG*//*Label 154*/ GIMT_Encode4(562812),
    /*AMDGPU::G_AMDGPU_SMED3*//*Label 155*/ GIMT_Encode4(562862),
    /*AMDGPU::G_AMDGPU_S_BUFFER_LOAD*//*Label 156*/ GIMT_Encode4(563010),
    /*AMDGPU::G_AMDGPU_S_BUFFER_LOAD_SBYTE*//*Label 157*/ GIMT_Encode4(565312),
    /*AMDGPU::G_AMDGPU_S_BUFFER_LOAD_SSHORT*//*Label 158*/ GIMT_Encode4(565450),
    /*AMDGPU::G_AMDGPU_S_BUFFER_LOAD_UBYTE*//*Label 159*/ GIMT_Encode4(565588),
    /*AMDGPU::G_AMDGPU_S_BUFFER_LOAD_USHORT*//*Label 160*/ GIMT_Encode4(565726),
    /*AMDGPU::G_AMDGPU_S_BUFFER_PREFETCH*//*Label 161*/ GIMT_Encode4(565864), GIMT_Encode4(0), GIMT_Encode4(0),
    /*AMDGPU::G_AMDGPU_TBUFFER_LOAD_FORMAT*//*Label 162*/ GIMT_Encode4(565971),
    /*AMDGPU::G_AMDGPU_TBUFFER_LOAD_FORMAT_D16*//*Label 163*/ GIMT_Encode4(571455),
    /*AMDGPU::G_AMDGPU_TBUFFER_STORE_FORMAT*//*Label 164*/ GIMT_Encode4(576884),
    /*AMDGPU::G_AMDGPU_TBUFFER_STORE_FORMAT_D16*//*Label 165*/ GIMT_Encode4(582352),
    /*AMDGPU::G_AMDGPU_UMED3*//*Label 166*/ GIMT_Encode4(586957),
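    // NOTE (hand-written annotation, not TableGen output): the GIMT_Encode4
    // entries above are the opcode-indexed jump table of the outer opcode switch.
    // Each non-zero 4-byte value is the byte offset of the matcher block for one
    // generic opcode (named in the /*Label N*/ comments); the GIMT_Encode4(0)
    // entries appear to cover opcodes with no imported selection rules, which
    // fall back to the switch's default label. The per-opcode matcher blocks
    // follow, starting at Label 0 below.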
    // Label 0: @14438
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(7), GIMT_Encode2(12), /*)*//*default:*//*Label 173*/ GIMT_Encode4(18071),
    /*GILLT_s1*//*Label 168*/ GIMT_Encode4(14469),
    /*GILLT_s16*//*Label 169*/ GIMT_Encode4(14588),
    /*GILLT_s32*//*Label 170*/ GIMT_Encode4(14861),
    /*GILLT_s64*//*Label 171*/ GIMT_Encode4(17594),
    /*GILLT_v2s16*//*Label 172*/ GIMT_Encode4(17801),
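    // NOTE (hand-written annotation, not TableGen output): Label 0 opens what is
    // evidently the G_ADD matcher block (every rule comment below shows an `add`
    // pattern). GIM_SwitchType re-dispatches on the LLT of operand 0 of the root
    // instruction: s1, s16, s32, s64 and v2s16 adds branch to Labels 168-172
    // respectively, and any other type goes to the default at Label 173.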
    // Label 168: @14469
    GIM_Try, /*On fail goto*//*Label 174*/ GIMT_Encode4(14587),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s1,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s1,
      GIM_Try, /*On fail goto*//*Label 175*/ GIMT_Encode4(14508), // Rule ID 7266 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        // (add:{ *:[i1] } i1:{ *:[i1] }:$src0, -1:{ *:[i1] })  =>  (S_NOT_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOT_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7266,
        GIR_EraseRootFromParent_Done,
      // Label 175: @14508
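      // NOTE (hand-written annotation, not TableGen output): each GIM_Try carries
      // a 4-byte on-fail offset; if any GIM_* check inside it fails, execution
      // resumes at that label (here Label 175) and the next rule is attempted.
      // Rule 7266 above, for instance, requires the isWave64 feature, operand 0
      // on the SReg_64 bank and operand 2 equal to the constant -1; its GIR_*
      // actions then build an S_NOT_B64, copy the def and src0 across, mark the
      // implicit SCC def dead, constrain the operands and erase the original
      // instruction.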
      GIM_Try, /*On fail goto*//*Label 176*/ GIMT_Encode4(14536), // Rule ID 7273 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        // (add:{ *:[i1] } i1:{ *:[i1] }:$src0, -1:{ *:[i1] })  =>  (S_NOT_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOT_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7273,
        GIR_EraseRootFromParent_Done,
      // Label 176: @14536
      GIM_Try, /*On fail goto*//*Label 177*/ GIMT_Encode4(14561), // Rule ID 7264 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        // (add:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7264,
        GIR_Done,
      // Label 177: @14561
      GIM_Try, /*On fail goto*//*Label 178*/ GIMT_Encode4(14586), // Rule ID 7271 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        // (add:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7271,
        GIR_Done,
      // Label 178: @14586
      GIM_Reject,
    // Label 174: @14587
    GIM_Reject,
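    // NOTE (hand-written annotation, not TableGen output): the s1 block above
    // (Label 168) covers boolean adds held in scalar condition registers: adding
    // the constant -1 selects S_NOT_B64/S_NOT_B32 (wave64 vs. wave32), while the
    // general two-operand case mutates the G_ADD in place into S_XOR_B64/S_XOR_B32
    // with a dead implicit SCC def, since addition and xor coincide on i1.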
    // Label 169: @14588
    GIM_Try, /*On fail goto*//*Label 179*/ GIMT_Encode4(14860),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_Try, /*On fail goto*//*Label 180*/ GIMT_Encode4(14642), // Rule ID 2259 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX8GFX9),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        GIM_CheckI64ImmPredicate, /*MI*/1, /*Predicate*/GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineIntConst16),
        // MIs[1] Operand 1
        // No operand predicates
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i16] } i16:{ *:[i16] }:$src0, (imm:{ *:[i16] })<<P:Predicate_NegSubInlineIntConst16>><<X:NegateImm>>:$src1)  =>  (V_SUB_U16_e64:{ *:[i16] } VSrc_b16:{ *:[i16] }:$src0, (NegateImm:{ *:[i16 i32 bf16 f16 f32 v2i16 v2f16 v2bf16] } (imm:{ *:[i16] })<<P:Predicate_NegSubInlineIntConst16>>:$src1))
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_CustomRenderer, /*InsnID*/0, /*OldInsnID*/1, /*Renderer*/GIMT_Encode2(GICR_renderNegateImm), // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2259,
        GIR_EraseRootFromParent_Done,
      // Label 180: @14642
      GIM_Try, /*On fail goto*//*Label 181*/ GIMT_Encode4(14690), // Rule ID 2362 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        GIM_CheckI64ImmPredicate, /*MI*/1, /*Predicate*/GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineIntConst16),
        // MIs[1] Operand 1
        // No operand predicates
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i16] } i16:{ *:[i16] }:$src0, (imm:{ *:[i16] })<<P:Predicate_NegSubInlineIntConst16>><<X:NegateImm>>:$src1)  =>  (V_SUB_NC_U16_e64:{ *:[i16] } 0:{ *:[i32] }, VSrc_b16:{ *:[i16] }:$src0, 0:{ *:[i32] }, (NegateImm:{ *:[i16 i32 bf16 f16 f32 v2i16 v2f16 v2bf16] } (imm:{ *:[i16] })<<P:Predicate_NegSubInlineIntConst16>>:$src1), 0:{ *:[i1] }, 0:{ *:[i32] })
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_NC_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_CustomRenderer, /*InsnID*/0, /*OldInsnID*/1, /*Renderer*/GIMT_Encode2(GICR_renderNegateImm), // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2362,
        GIR_EraseRootFromParent_Done,
      // Label 181: @14690
      GIM_Try, /*On fail goto*//*Label 182*/ GIMT_Encode4(14736), // Rule ID 2297 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i16] } (mul:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1), i16:{ *:[i16] }:$src2)  =>  (V_MAD_U16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1, i16:{ *:[i16] }:$src2, 0:{ *:[i1] })
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2297,
        GIR_EraseRootFromParent_Done,
      // Label 182: @14736
      GIM_Try, /*On fail goto*//*Label 183*/ GIMT_Encode4(14782), // Rule ID 8222 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_isGFX6GFX7GFX8GFX9),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i16] } i16:{ *:[i16] }:$src2, (mul:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1))  =>  (V_MAD_U16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1, i16:{ *:[i16] }:$src2, 0:{ *:[i1] })
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8222,
        GIR_EraseRootFromParent_Done,
      // Label 183: @14782
      GIM_Try, /*On fail goto*//*Label 184*/ GIMT_Encode4(14837), // Rule ID 1023 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
        // (add:{ *:[i16] } (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_ADD_NC_U16_e64:{ *:[i16] } i32:{ *:[i32] }:$src0_modifiers, i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD_NC_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1023,
        GIR_EraseRootFromParent_Done,
      // Label 184: @14837
      GIM_Try, /*On fail goto*//*Label 185*/ GIMT_Encode4(14859), // Rule ID 915 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX8GFX9),
        // (add:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_ADD_U16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 915,
        GIR_EraseRootFromParent_Done,
      // Label 185: @14859
      GIM_Reject,
    // Label 179: @14860
    GIM_Reject,
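    // NOTE (hand-written annotation, not TableGen output): the s16 block above
    // (Label 169) handles 16-bit adds on the VGPR bank. An add whose RHS is a
    // G_CONSTANT satisfying NegSubInlineIntConst16 becomes V_SUB_U16_e64 (GFX8/9)
    // or V_SUB_NC_U16_e64 (GFX10+) with the immediate negated by the
    // renderNegateImm custom renderer; add-of-mul folds into V_MAD_U16_e64 in
    // either operand order; GFX10+ adds matched through the vop3opselmods complex
    // pattern select V_ADD_NC_U16_e64; the remaining GFX8/9 case uses V_ADD_U16_e64.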
    // Label 170: @14861
    GIM_Try, /*On fail goto*//*Label 186*/ GIMT_Encode4(17593),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 187*/ GIMT_Encode4(14965), // Rule ID 2314 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:1:x
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordNamedOperand, /*MI*/2, /*Op*/1, /*StoreIdx*/1, // Name : pred:1:y
        GIM_RecordInsn, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        // MIs[3] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/2, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:1:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (add:{ *:[i32] } (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:1:x, (and:{ *:[i32] } i32:{ *:[i32] }:$src1:$pred:1:y, (imm:{ *:[i32] }))<<P:Predicate_csh_mask_32>>)<<P:Predicate_anonymous_24876>>, i32:{ *:[i32] }:$src2:$pred:1:z)<<P:1:Predicate_anonymous_24875>>  =>  (V_LSHL_ADD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/2, /*OpIdx*/1, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2314,
        GIR_EraseRootFromParent_Done,
      // Label 187: @14965
      GIM_Try, /*On fail goto*//*Label 188*/ GIMT_Encode4(15058), // Rule ID 8225 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:1:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:1:x
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordNamedOperand, /*MI*/2, /*Op*/1, /*StoreIdx*/1, // Name : pred:1:y
        GIM_RecordInsn, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        // MIs[3] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/2, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:1:z, (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:1:x, (and:{ *:[i32] } i32:{ *:[i32] }:$src1:$pred:1:y, (imm:{ *:[i32] }))<<P:Predicate_csh_mask_32>>)<<P:Predicate_anonymous_24876>>)<<P:1:Predicate_anonymous_24875>>  =>  (V_LSHL_ADD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/2, /*OpIdx*/1, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8225,
        GIR_EraseRootFromParent_Done,
      // Label 188: @15058
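      // Rules 84-87 (and their commuted forms 8142-8145 below): scalar
      // shift-and-add, (add (shl x, N), y) with N in 1..4 on SGPR operands,
      // selects S_LSHL<N>_ADD_U32 with the implicit SCC def marked dead.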
      GIM_Try, /*On fail goto*//*Label 189*/ GIMT_Encode4(15123), // Rule ID 84 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 1,
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl1_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 1:{ *:[i32] })<<P:Predicate_shl_oneuse>>, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_shl1_add>>  =>  (S_LSHL1_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL1_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 84,
        GIR_EraseRootFromParent_Done,
      // Label 189: @15123
      GIM_Try, /*On fail goto*//*Label 190*/ GIMT_Encode4(15188), // Rule ID 85 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 2,
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl2_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 2:{ *:[i32] })<<P:Predicate_shl_oneuse>>, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_shl2_add>>  =>  (S_LSHL2_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL2_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 85,
        GIR_EraseRootFromParent_Done,
      // Label 190: @15188
      GIM_Try, /*On fail goto*//*Label 191*/ GIMT_Encode4(15253), // Rule ID 86 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 3,
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl3_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 3:{ *:[i32] })<<P:Predicate_shl_oneuse>>, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_shl3_add>>  =>  (S_LSHL3_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL3_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 86,
        GIR_EraseRootFromParent_Done,
      // Label 191: @15253
      GIM_Try, /*On fail goto*//*Label 192*/ GIMT_Encode4(15318), // Rule ID 87 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 4,
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl4_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 4:{ *:[i32] })<<P:Predicate_shl_oneuse>>, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_shl4_add>>  =>  (S_LSHL4_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL4_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 87,
        GIR_EraseRootFromParent_Done,
      // Label 192: @15318
      GIM_Try, /*On fail goto*//*Label 193*/ GIMT_Encode4(15383), // Rule ID 8142 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl1_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src1, (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 1:{ *:[i32] })<<P:Predicate_shl_oneuse>>)<<P:Predicate_shl1_add>>  =>  (S_LSHL1_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL1_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8142,
        GIR_EraseRootFromParent_Done,
      // Label 193: @15383
      GIM_Try, /*On fail goto*//*Label 194*/ GIMT_Encode4(15448), // Rule ID 8143 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl2_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src1, (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 2:{ *:[i32] })<<P:Predicate_shl_oneuse>>)<<P:Predicate_shl2_add>>  =>  (S_LSHL2_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL2_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8143,
        GIR_EraseRootFromParent_Done,
      // Label 194: @15448
      GIM_Try, /*On fail goto*//*Label 195*/ GIMT_Encode4(15513), // Rule ID 8144 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 3,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl3_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src1, (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 3:{ *:[i32] })<<P:Predicate_shl_oneuse>>)<<P:Predicate_shl3_add>>  =>  (S_LSHL3_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL3_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8144,
        GIR_EraseRootFromParent_Done,
      // Label 195: @15513
      GIM_Try, /*On fail goto*//*Label 196*/ GIMT_Encode4(15578), // Rule ID 8145 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckRegBankForClass, /*MI*/1, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, 4,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl4_add),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src1, (shl:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, 4:{ *:[i32] })<<P:Predicate_shl_oneuse>>)<<P:Predicate_shl4_add>>  =>  (S_LSHL4_ADD_U32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_LSHL4_ADD_U32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8145,
        GIR_EraseRootFromParent_Done,
      // Label 196: @15578
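      // Rules 8235/8239 (and 2332/2337, 2331/2336, 8234/8238 further below):
      // fold (add z, (mul x, y)), in either operand order, into
      // V_MAD_U64_U32_e64, or the gfx11 encoding when the MAD
      // intra-forwarding bug is present. The 32-bit addend is widened to 64
      // bits via a REG_SEQUENCE with an IMPLICIT_DEF high half, and the
      // result is taken from sub0 of the 64-bit destination.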
      GIM_Try, /*On fail goto*//*Label 197*/ GIMT_Encode4(15743), // Rule ID 8235 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:11:z
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:11:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:11:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } VGPR_32:{ *:[i32] }:$src2:$pred:11:z, (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:11:x, i32:{ *:[i32] }:$src1:$pred:11:y)<<P:Predicate_anonymous_24907>>)<<P:11:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/1, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 8235,
        GIR_EraseRootFromParent_Done,
      // Label 197: @15743
      GIM_Try, /*On fail goto*//*Label 198*/ GIMT_Encode4(15908), // Rule ID 8239 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:14:z
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:14:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:14:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } VGPR_32:{ *:[i32] }:$src2:$pred:14:z, (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:14:x, i32:{ *:[i32] }:$src1:$pred:14:y)<<P:Predicate_anonymous_24907>>)<<P:14:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_gfx11_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/1, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 8239,
        GIR_EraseRootFromParent_Done,
      // Label 198: @15908
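      // Rules 2317 and 8226: fold a single-use inner G_ADD into
      // V_ADD3_U32_e64 (three-way add), for either operand order.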
      GIM_Try, /*On fail goto*//*Label 199*/ GIMT_Encode4(15973), // Rule ID 2317 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_ADD),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:3:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:3:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:3:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24881),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (add:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:3:x, i32:{ *:[i32] }:$src1:$pred:3:y)<<P:Predicate_anonymous_24879>>, i32:{ *:[i32] }:$src2:$pred:3:z)<<P:3:Predicate_anonymous_24881>>  =>  (V_ADD3_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD3_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2317,
        GIR_EraseRootFromParent_Done,
      // Label 199: @15973
      GIM_Try, /*On fail goto*//*Label 200*/ GIMT_Encode4(16138), // Rule ID 2332 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:11:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:11:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:11:z
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:11:x, i32:{ *:[i32] }:$src1:$pred:11:y)<<P:Predicate_anonymous_24907>>, VGPR_32:{ *:[i32] }:$src2:$pred:11:z)<<P:11:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/2, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 2332,
        GIR_EraseRootFromParent_Done,
      // Label 200: @16138
      GIM_Try, /*On fail goto*//*Label 201*/ GIMT_Encode4(16303), // Rule ID 2337 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:14:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:14:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:14:z
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:14:x, i32:{ *:[i32] }:$src1:$pred:14:y)<<P:Predicate_anonymous_24907>>, VGPR_32:{ *:[i32] }:$src2:$pred:14:z)<<P:14:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_gfx11_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/2, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 2337,
        GIR_EraseRootFromParent_Done,
      // Label 201: @16303
      GIM_Try, /*On fail goto*//*Label 202*/ GIMT_Encode4(16464), // Rule ID 2331 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:10:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:10:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:10:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:10:x, i32:{ *:[i32] }:$src1:$pred:10:y)<<P:Predicate_anonymous_24907>>, i32:{ *:[i32] }:$src2:$pred:10:z)<<P:10:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } SReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/2, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 2331,
        GIR_EraseRootFromParent_Done,
      // Label 202: @16464
      GIM_Try, /*On fail goto*//*Label 203*/ GIMT_Encode4(16625), // Rule ID 2336 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:13:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:13:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:13:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:13:x, i32:{ *:[i32] }:$src1:$pred:13:y)<<P:Predicate_anonymous_24907>>, i32:{ *:[i32] }:$src2:$pred:13:z)<<P:13:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_gfx11_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } SReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/2, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 2336,
        GIR_EraseRootFromParent_Done,
      // Label 203: @16625
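      // Rules 2313/8224 and 2323/8231: the general (add (shl x, y), z) and
      // (add (xor x, y), z) patterns, in both operand orders, select
      // V_LSHL_ADD_U32_e64 and V_XAD_U32_e64 respectively.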
      GIM_Try, /*On fail goto*//*Label 204*/ GIMT_Encode4(16690), // Rule ID 2313 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:1:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:1:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:1:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:1:x, i32:{ *:[i32] }:$src1:$pred:1:y)<<P:Predicate_anonymous_24876>>, i32:{ *:[i32] }:$src2:$pred:1:z)<<P:1:Predicate_anonymous_24875>>  =>  (V_LSHL_ADD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2313,
        GIR_EraseRootFromParent_Done,
      // Label 204: @16690
      GIM_Try, /*On fail goto*//*Label 205*/ GIMT_Encode4(16755), // Rule ID 2323 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:8:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:8:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:8:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24894),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:8:x, i32:{ *:[i32] }:$src1:$pred:8:y)<<P:Predicate_anonymous_24895>>, i32:{ *:[i32] }:$src2:$pred:8:z)<<P:8:Predicate_anonymous_24894>>  =>  (V_XAD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XAD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2323,
        GIR_EraseRootFromParent_Done,
      // Label 205: @16755
      GIM_Try, /*On fail goto*//*Label 206*/ GIMT_Encode4(16820), // Rule ID 8226 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:3:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_ADD),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:3:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:3:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24881),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:3:z, (add:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:3:x, i32:{ *:[i32] }:$src1:$pred:3:y)<<P:Predicate_anonymous_24879>>)<<P:3:Predicate_anonymous_24881>>  =>  (V_ADD3_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD3_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8226,
        GIR_EraseRootFromParent_Done,
      // Label 206: @16820
      GIM_Try, /*On fail goto*//*Label 207*/ GIMT_Encode4(16981), // Rule ID 8234 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasNotMADIntraFwdBug_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:10:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:10:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:10:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:10:z, (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:10:x, i32:{ *:[i32] }:$src1:$pred:10:y)<<P:Predicate_anonymous_24907>>)<<P:10:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } SReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/1, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 8234,
        GIR_EraseRootFromParent_Done,
      // Label 207: @16981
      GIM_Try, /*On fail goto*//*Label 208*/ GIMT_Encode4(17142), // Rule ID 8238 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasMADIntraFwdBug_isGFX11Only),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:13:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:13:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:13:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24906),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:13:z, (mul:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:13:x, i32:{ *:[i32] }:$src1:$pred:13:y)<<P:Predicate_anonymous_24907>>)<<P:13:Predicate_anonymous_24906>>  =>  (EXTRACT_SUBREG:{ *:[i32] } (V_MAD_U64_U32_gfx11_e64:{ *:[i64] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1, (REG_SEQUENCE:{ *:[i64] } SReg_64:{ *:[i32] }, ?:{ *:[i32] }:$src2, sub0:{ *:[i32] }, (IMPLICIT_DEF:{ *:[i32] }), sub1:{ *:[i32] }), 0:{ *:[i1] }), sub0:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s64,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::IMPLICIT_DEF),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/0, /*OpIdx*/1, // src2
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_AddImm8, /*InsnID*/2, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::SReg_32_XEXEC_HI_and_SReg_32_XM0RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/3, GIMT_Encode2(AMDGPU::SReg_32_XM0RegClassID),
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s1,
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s64,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_MAD_U64_U32_gfx11_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddImm8, /*InsnID*/1, /*Imm*/0,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddTempSubRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(0), GIMT_Encode2(AMDGPU::sub0),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        // GIR_Coverage, 8238,
        GIR_EraseRootFromParent_Done,
      // Label 208: @17142
      GIM_Try, /*On fail goto*//*Label 209*/ GIMT_Encode4(17207), // Rule ID 8224 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:1:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:1:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:1:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24875),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:1:z, (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:1:x, i32:{ *:[i32] }:$src1:$pred:1:y)<<P:Predicate_anonymous_24876>>)<<P:1:Predicate_anonymous_24875>>  =>  (V_LSHL_ADD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8224,
        GIR_EraseRootFromParent_Done,
      // Label 209: @17207
      GIM_Try, /*On fail goto*//*Label 210*/ GIMT_Encode4(17272), // Rule ID 8231 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:8:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:8:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:8:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24894),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:8:z, (xor:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:8:x, i32:{ *:[i32] }:$src1:$pred:8:y)<<P:Predicate_anonymous_24895>>)<<P:8:Predicate_anonymous_24894>>  =>  (V_XAD_U32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XAD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8231,
        GIR_EraseRootFromParent_Done,
      // Label 210: @17272
      GIM_Try, /*On fail goto*//*Label 211*/ GIMT_Encode4(17316), // Rule ID 7392 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        GIM_CheckI64ImmPredicate, /*MI*/1, /*Predicate*/GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
        // MIs[1] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src0, (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>><<X:NegateImm>>:$src1)<<P:Predicate_anonymous_18637>>  =>  (S_SUB_I32:{ *:[i32] }:{ *:[i1] } SReg_32:{ *:[i32] }:$src0, (NegateImm:{ *:[i1 i16 i32 bf16 f16 f32 v2i16 v2f16 v2bf16] } (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>>:$src1))
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_SUB_I32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_CustomRenderer, /*InsnID*/0, /*OldInsnID*/1, /*Renderer*/GIMT_Encode2(GICR_renderNegateImm), // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7392,
        GIR_EraseRootFromParent_Done,
      // Label 211: @17316
      GIM_Try, /*On fail goto*//*Label 212*/ GIMT_Encode4(17363), // Rule ID 7393 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        GIM_CheckI64ImmPredicate, /*MI*/1, /*Predicate*/GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
        // MIs[1] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src0, (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>><<X:NegateImm>>:$src1)<<P:Predicate_anonymous_24002>>  =>  (V_SUB_U32_e64:{ *:[i32] } VS_32:{ *:[i32] }:$src0, (NegateImm:{ *:[i16 i32 bf16 f16 f32 v2i16 v2f16 v2bf16] } (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>>:$src1))
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_CustomRenderer, /*InsnID*/0, /*OldInsnID*/1, /*Renderer*/GIMT_Encode2(GICR_renderNegateImm), // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7393,
        GIR_EraseRootFromParent_Done,
      // Label 212: @17363
      GIM_Try, /*On fail goto*//*Label 213*/ GIMT_Encode4(17418), // Rule ID 7394 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_NotHasAddNoCarryInsts),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        GIM_CheckI64ImmPredicate, /*MI*/1, /*Predicate*/GIMT_Encode2(GICXXPred_I64_Predicate_NegSubInlineConst32),
        // MIs[1] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src0, (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>><<X:NegateImm>>:$src1)<<P:Predicate_anonymous_24002>>  =>  (V_SUB_CO_U32_e64:{ *:[i32] }:{ *:[i1] } VS_32:{ *:[i32] }:$src0, (NegateImm:{ *:[i16 i32 bf16 f16 f32 v2i16 v2f16 v2bf16] } (imm:{ *:[i32] })<<P:Predicate_NegSubInlineConst32>>:$src1))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_CO_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddTempRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_CustomRenderer, /*InsnID*/0, /*OldInsnID*/1, /*Renderer*/GIMT_Encode2(GICR_renderNegateImm), // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7394,
        GIR_EraseRootFromParent_Done,
      // Label 213: @17418
      GIM_Try, /*On fail goto*//*Label 214*/ GIMT_Encode4(17452), // Rule ID 45 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
        // (add:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18637>>  =>  (S_ADD_I32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_ADD_I32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 45,
        GIR_Done,
      // Label 214: @17452
      GIM_Try, /*On fail goto*//*Label 215*/ GIMT_Encode4(17482), // Rule ID 2223 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_24002>>  =>  (V_ADD_U32_e64:{ *:[i32] } ?:{ *:[i32] }:$src0, ?:{ *:[i32] }:$src1, 0:{ *:[i1] })
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2223,
        GIR_EraseRootFromParent_Done,
      // Label 215: @17482
      GIM_Try, /*On fail goto*//*Label 216*/ GIMT_Encode4(17520), // Rule ID 2225 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX6GFX7GFX8GFX9),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_24002>>  =>  (V_ADD_CO_U32_e64:{ *:[i32] }:{ *:[i1] } ?:{ *:[i32] }:$src0, ?:{ *:[i32] }:$src1, 0:{ *:[i1] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD_CO_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddTempRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2225,
        GIR_EraseRootFromParent_Done,
      // Label 216: @17520
      GIM_Try, /*On fail goto*//*Label 217*/ GIMT_Encode4(17556), // Rule ID 834 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CTPOP),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } (ctpop:{ *:[i32] } i32:{ *:[i32] }:$src0), i32:{ *:[i32] }:$src1)  =>  (V_BCNT_U32_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BCNT_U32_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 834,
        GIR_EraseRootFromParent_Done,
      // Label 217: @17556
      GIM_Try, /*On fail goto*//*Label 218*/ GIMT_Encode4(17592), // Rule ID 8157 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_CTPOP),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i32] } i32:{ *:[i32] }:$src1, (ctpop:{ *:[i32] } i32:{ *:[i32] }:$src0))  =>  (V_BCNT_U32_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BCNT_U32_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8157,
        GIR_EraseRootFromParent_Done,
      // Label 218: @17592
      GIM_Reject,
    // Label 186: @17593
    GIM_Reject,
    // Label 171: @17594
    GIM_Try, /*On fail goto*//*Label 219*/ GIMT_Encode4(17800),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
      GIM_Try, /*On fail goto*//*Label 220*/ GIMT_Encode4(17674), // Rule ID 2325 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX940Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:9:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:9:y
        GIM_CheckCxxInsnPredicate, /*MI*/1, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl_0_to_4),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:9:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24898),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i64] } (shl:{ *:[i64] } i64:{ *:[i64] }:$src0:$pred:9:x, i32:{ *:[i32] }:$src1:$pred:9:y)<<P:Predicate_shl_0_to_4>><<P:Predicate_anonymous_24899>>, i64:{ *:[i64] }:$src2:$pred:9:z)<<P:9:Predicate_anonymous_24898>>  =>  (V_LSHL_ADD_U64_e64:{ *:[i64] } VSrc_b64:{ *:[i64] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b64:{ *:[i64] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U64_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2325,
        GIR_EraseRootFromParent_Done,
      // Label 220: @17674
      GIM_Try, /*On fail goto*//*Label 221*/ GIMT_Encode4(17743), // Rule ID 8232 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX940Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:9:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:9:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:9:y
        GIM_CheckCxxInsnPredicate, /*MI*/1, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_shl_0_to_4),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24898),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (add:{ *:[i64] } i64:{ *:[i64] }:$src2:$pred:9:z, (shl:{ *:[i64] } i64:{ *:[i64] }:$src0:$pred:9:x, i32:{ *:[i32] }:$src1:$pred:9:y)<<P:Predicate_shl_0_to_4>><<P:Predicate_anonymous_24899>>)<<P:9:Predicate_anonymous_24898>>  =>  (V_LSHL_ADD_U64_e64:{ *:[i64] } VSrc_b64:{ *:[i64] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b64:{ *:[i64] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_ADD_U64_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8232,
        GIR_EraseRootFromParent_Done,
      // Label 221: @17743
      GIM_Try, /*On fail goto*//*Label 222*/ GIMT_Encode4(17773), // Rule ID 1195 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24002),
        // (add:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_24002>>  =>  (V_ADD_U64_PSEUDO:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_ADD_U64_PSEUDO),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::VCC), GIMT_Encode2(RegState::Dead),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1195,
        GIR_Done,
      // Label 222: @17773
      GIM_Try, /*On fail goto*//*Label 223*/ GIMT_Encode4(17799), // Rule ID 1197 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18637),
        // (add:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_18637>>  =>  (S_ADD_U64_PSEUDO:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_ADD_U64_PSEUDO),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1197,
        GIR_Done,
      // Label 223: @17799
      GIM_Reject,
    // Label 219: @17800
    GIM_Reject,
    // Label 172: @17801
    GIM_Try, /*On fail goto*//*Label 224*/ GIMT_Encode4(18070),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_Try, /*On fail goto*//*Label 225*/ GIMT_Encode4(17912), // Rule ID 8209 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
        GIM_CheckComplexPattern, /*MI*/1, /*Op*/1, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
        GIM_CheckComplexPattern, /*MI*/1, /*Op*/2, /*Renderer*/GIMT_Encode2(2), GIMT_Encode2(GICP_gi_vop3pmods),
        // (add:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src2, i32:{ *:[i32] }:$src2_modifiers), (mul:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers)))  =>  (V_PK_MAD_U16:{ *:[v2i16] } i32:{ *:[i32] }:$src0_modifiers, v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src2_modifiers, v2i16:{ *:[v2i16] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_PK_MAD_U16),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(2), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(2), /*SubOperand*/0, // src1
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src2_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src2
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8209,
        GIR_EraseRootFromParent_Done,
      // Label 225: @17912
      GIM_Try, /*On fail goto*//*Label 226*/ GIMT_Encode4(18008), // Rule ID 1037 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_MUL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(2), GIMT_Encode2(GICP_gi_vop3pmods),
        GIM_CheckComplexPattern, /*MI*/1, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
        GIM_CheckComplexPattern, /*MI*/1, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
        // (add:{ *:[v2i16] } (mul:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers)), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src2, i32:{ *:[i32] }:$src2_modifiers))  =>  (V_PK_MAD_U16:{ *:[v2i16] } i32:{ *:[i32] }:$src0_modifiers, v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src2_modifiers, v2i16:{ *:[v2i16] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_PK_MAD_U16),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(2), /*SubOperand*/1, // src2_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(2), /*SubOperand*/0, // src2
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1037,
        GIR_EraseRootFromParent_Done,
      // Label 226: @18008
      GIM_Try, /*On fail goto*//*Label 227*/ GIMT_Encode4(18069), // Rule ID 1048 //
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
        // (add:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_PK_ADD_U16:{ *:[v2i16] } i32:{ *:[i32] }:$src0_modifiers, v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, v2i16:{ *:[v2i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_PK_ADD_U16),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1048,
        GIR_EraseRootFromParent_Done,
      // Label 227: @18069
      GIM_Reject,
    // Label 224: @18070
    GIM_Reject,
    // Label 173: @18071
    GIM_Reject,
    // Label 1: @18072
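    // Rules in this group select 'sub' roots; the switch below dispatches on the destination register type (s1, s16, s32, s64, v2s16).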
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(7), GIMT_Encode2(12), /*)*//*default:*//*Label 233*/ GIMT_Encode4(18572),
    /*GILLT_s1*//*Label 228*/ GIMT_Encode4(18103),
    /*GILLT_s16*//*Label 229*/ GIMT_Encode4(18222),
    /*GILLT_s32*//*Label 230*/ GIMT_Encode4(18316),
    /*GILLT_s64*//*Label 231*/ GIMT_Encode4(18431),
    /*GILLT_v2s16*//*Label 232*/ GIMT_Encode4(18500),
    // Label 228: @18103
    GIM_Try, /*On fail goto*//*Label 234*/ GIMT_Encode4(18221),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s1,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s1,
      GIM_Try, /*On fail goto*//*Label 235*/ GIMT_Encode4(18142), // Rule ID 7267 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        // (sub:{ *:[i1] } i1:{ *:[i1] }:$src0, -1:{ *:[i1] })  =>  (S_NOT_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOT_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7267,
        GIR_EraseRootFromParent_Done,
      // Label 235: @18142
      GIM_Try, /*On fail goto*//*Label 236*/ GIMT_Encode4(18170), // Rule ID 7274 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        // (sub:{ *:[i1] } i1:{ *:[i1] }:$src0, -1:{ *:[i1] })  =>  (S_NOT_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOT_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7274,
        GIR_EraseRootFromParent_Done,
      // Label 236: @18170
      GIM_Try, /*On fail goto*//*Label 237*/ GIMT_Encode4(18195), // Rule ID 7265 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        // (sub:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7265,
        GIR_Done,
      // Label 237: @18195
      GIM_Try, /*On fail goto*//*Label 238*/ GIMT_Encode4(18220), // Rule ID 7272 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        // (sub:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7272,
        GIR_Done,
      // Label 238: @18220
      GIM_Reject,
    // Label 234: @18221
    GIM_Reject,
    // Label 229: @18222
    GIM_Try, /*On fail goto*//*Label 239*/ GIMT_Encode4(18315),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_Try, /*On fail goto*//*Label 240*/ GIMT_Encode4(18292), // Rule ID 1024 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
        // (sub:{ *:[i16] } (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_SUB_NC_U16_e64:{ *:[i16] } i32:{ *:[i32] }:$src0_modifiers, i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_NC_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1024,
        GIR_EraseRootFromParent_Done,
      // Label 240: @18292
      GIM_Try, /*On fail goto*//*Label 241*/ GIMT_Encode4(18314), // Rule ID 916 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX8GFX9),
        // (sub:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_SUB_U16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_U16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 916,
        GIR_EraseRootFromParent_Done,
      // Label 241: @18314
      GIM_Reject,
    // Label 239: @18315
    GIM_Reject,
    // Label 230: @18316
    GIM_Try, /*On fail goto*//*Label 242*/ GIMT_Encode4(18430),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 243*/ GIMT_Encode4(18361), // Rule ID 46 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/1, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RootCheckRegBankForClass, /*Op*/2, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18638),
        // (sub:{ *:[i32] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18638>>  =>  (S_SUB_I32:{ *:[i32] }:{ *:[i1] } SSrc_b32:{ *:[i32] }:$src0, SSrc_b32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_SUB_I32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 46,
        GIR_Done,
      // Label 243: @18361
      GIM_Try, /*On fail goto*//*Label 244*/ GIMT_Encode4(18391), // Rule ID 2224 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasAddNoCarryInsts),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
        // (sub:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_24004>>  =>  (V_SUB_U32_e64:{ *:[i32] } ?:{ *:[i32] }:$src0, ?:{ *:[i32] }:$src1, 0:{ *:[i1] })
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2224,
        GIR_EraseRootFromParent_Done,
      // Label 244: @18391
      GIM_Try, /*On fail goto*//*Label 245*/ GIMT_Encode4(18429), // Rule ID 2226 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX6GFX7GFX8GFX9),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
        // (sub:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_24004>>  =>  (V_SUB_CO_U32_e64:{ *:[i32] }:{ *:[i1] } ?:{ *:[i32] }:$src0, ?:{ *:[i32] }:$src1, 0:{ *:[i1] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_CO_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddTempRegister, /*InsnID*/0, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define|RegState::Dead),
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2226,
        GIR_EraseRootFromParent_Done,
      // Label 245: @18429
      GIM_Reject,
    // Label 242: @18430
    GIM_Reject,
    // Label 231: @18431
    GIM_Try, /*On fail goto*//*Label 246*/ GIMT_Encode4(18499),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
      GIM_Try, /*On fail goto*//*Label 247*/ GIMT_Encode4(18472), // Rule ID 1196 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24004),
        // (sub:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_24004>>  =>  (V_SUB_U64_PSEUDO:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_SUB_U64_PSEUDO),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::VCC), GIMT_Encode2(RegState::Dead),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1196,
        GIR_Done,
      // Label 247: @18472
      GIM_Try, /*On fail goto*//*Label 248*/ GIMT_Encode4(18498), // Rule ID 1198 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18638),
        // (sub:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_18638>>  =>  (S_SUB_U64_PSEUDO:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_SUB_U64_PSEUDO),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1198,
        GIR_Done,
      // Label 248: @18498
      GIM_Reject,
    // Label 246: @18499
    GIM_Reject,
    // Label 232: @18500
    GIM_Try, /*On fail goto*//*Label 249*/ GIMT_Encode4(18571), // Rule ID 1056 //
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
      GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
      // (sub:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_PK_SUB_I16:{ *:[v2i16] } i32:{ *:[i32] }:$src0_modifiers, v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, v2i16:{ *:[v2i16] }:$src1)
      GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_PK_SUB_I16),
      GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_RootConstrainSelectedInstOperands,
      // GIR_Coverage, 1056,
      GIR_EraseRootFromParent_Done,
    // Label 249: @18571
    GIM_Reject,
    // Label 233: @18572
    GIM_Reject,
    // Label 2: @18573
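    // Rules in this group select 'mul' roots; the switch below dispatches on the destination register type (s16, s32, s64, v2s16).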
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(8), GIMT_Encode2(12), /*)*//*default:*//*Label 254*/ GIMT_Encode4(18858),
    /*GILLT_s16*//*Label 250*/ GIMT_Encode4(18600),
    /*GILLT_s32*//*Label 251*/ GIMT_Encode4(18655),
    /*GILLT_s64*//*Label 252*/ GIMT_Encode4(18756),
    /*GILLT_v2s16*//*Label 253*/ GIMT_Encode4(18786),
    // Label 250: @18600
    GIM_Try, /*On fail goto*//*Label 255*/ GIMT_Encode4(18654),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_Try, /*On fail goto*//*Label 256*/ GIMT_Encode4(18634), // Rule ID 887 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_Has16BitInsts_NotHasTrue16BitInsts),
        // (mul:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_MUL_LO_U16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_MUL_LO_U16_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 887,
        GIR_Done,
      // Label 256: @18634
      GIM_Try, /*On fail goto*//*Label 257*/ GIMT_Encode4(18653), // Rule ID 888 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasTrue16BitInsts),
        // (mul:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_MUL_LO_U16_t16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_MUL_LO_U16_t16_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 888,
        GIR_Done,
      // Label 257: @18653
      GIM_Reject,
    // Label 255: @18654
    GIM_Reject,
    // Label 251: @18655
    GIM_Try, /*On fail goto*//*Label 258*/ GIMT_Encode4(18755),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 259*/ GIMT_Encode4(18686), // Rule ID 83 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18645),
        // (mul:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18645>>  =>  (S_MUL_I32:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_MUL_I32),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 83,
        GIR_Done,
      // Label 259: @18686
      GIM_Try, /*On fail goto*//*Label 260*/ GIMT_Encode4(18720), // Rule ID 944 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24493),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3mods0),
        // (mul:{ *:[i32] } (VOP3Mods0:{ *:[i32] } i32:{ *:[i32] }:$src0), i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_24493>>  =>  (V_MUL_LO_U32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_MUL_LO_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 944,
        GIR_EraseRootFromParent_Done,
      // Label 260: @18720
      GIM_Try, /*On fail goto*//*Label 261*/ GIMT_Encode4(18754), // Rule ID 8197 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24493),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3mods0),
        // (mul:{ *:[i32] } i32:{ *:[i32] }:$src1, (VOP3Mods0:{ *:[i32] } i32:{ *:[i32] }:$src0))<<P:Predicate_anonymous_24493>>  =>  (V_MUL_LO_U32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_MUL_LO_U32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8197,
        GIR_EraseRootFromParent_Done,
      // Label 261: @18754
      GIM_Reject,
    // Label 258: @18755
    GIM_Reject,
    // Label 252: @18756
    GIM_Try, /*On fail goto*//*Label 262*/ GIMT_Encode4(18785), // Rule ID 53 //
      GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX12Plus),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18645),
      // (mul:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_18645>>  =>  (S_MUL_U64:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
      GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_MUL_U64),
      GIR_RootConstrainSelectedInstOperands,
      // GIR_Coverage, 53,
      GIR_Done,
    // Label 262: @18785
    GIM_Reject,
    // Label 253: @18786
    GIM_Try, /*On fail goto*//*Label 263*/ GIMT_Encode4(18857), // Rule ID 1049 //
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s16,
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
      GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3pmods),
      GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3pmods),
      // (mul:{ *:[v2i16] } (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3PMods:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_PK_MUL_LO_U16:{ *:[v2i16] } i32:{ *:[i32] }:$src0_modifiers, v2i16:{ *:[v2i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, v2i16:{ *:[v2i16] }:$src1)
      GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_PK_MUL_LO_U16),
      GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
      GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
      GIR_RootConstrainSelectedInstOperands,
      // GIR_Coverage, 1049,
      GIR_EraseRootFromParent_Done,
    // Label 263: @18857
    GIM_Reject,
    // Label 254: @18858
    GIM_Reject,
    // Label 3: @18859
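    // Rules in this group select 'and' roots; the switch below dispatches on the destination register type (s1, s16, s32, s64, v2s16, v2s32, v4s16).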
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(7), GIMT_Encode2(17), /*)*//*default:*//*Label 271*/ GIMT_Encode4(19870),
    /*GILLT_s1*//*Label 264*/ GIMT_Encode4(18910),
    /*GILLT_s16*//*Label 265*/ GIMT_Encode4(18973),
    /*GILLT_s32*//*Label 266*/ GIMT_Encode4(19187),
    /*GILLT_s64*//*Label 267*/ GIMT_Encode4(19348),
    /*GILLT_v2s16*//*Label 268*/ GIMT_Encode4(19473),
    /*GILLT_v2s32*//*Label 269*/ GIMT_Encode4(19624), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*GILLT_v4s16*//*Label 270*/ GIMT_Encode4(19747),
    // Label 264: @18910
    GIM_Try, /*On fail goto*//*Label 272*/ GIMT_Encode4(18972),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s1,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s1,
      GIM_Try, /*On fail goto*//*Label 273*/ GIMT_Encode4(18946), // Rule ID 7261 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        // (and:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_AND_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_AND_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7261,
        GIR_Done,
      // Label 273: @18946
      GIM_Try, /*On fail goto*//*Label 274*/ GIMT_Encode4(18971), // Rule ID 7268 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        // (and:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_AND_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_AND_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7268,
        GIR_Done,
      // Label 274: @18971
      GIM_Reject,
    // Label 272: @18972
    GIM_Reject,
    // Label 265: @18973
    GIM_Try, /*On fail goto*//*Label 275*/ GIMT_Encode4(19186),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_Try, /*On fail goto*//*Label 276*/ GIMT_Encode4(19035), // Rule ID 8211 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i16] } (xor:{ *:[i16] } i16:{ *:[i16] }:$src1, -1:{ *:[i16] }), i16:{ *:[i16] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[i16] }:{ *:[i1] } SSrc_b32:{ *:[i16] }:$src0, SSrc_b32:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8211,
        GIR_EraseRootFromParent_Done,
      // Label 276: @19035
      GIM_Try, /*On fail goto*//*Label 277*/ GIMT_Encode4(19086), // Rule ID 1738 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i16] } i16:{ *:[i16] }:$src0, (xor:{ *:[i16] } i16:{ *:[i16] }:$src1, -1:{ *:[i16] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[i16] }:{ *:[i1] } SSrc_b32:{ *:[i16] }:$src0, SSrc_b32:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1738,
        GIR_EraseRootFromParent_Done,
      // Label 277: @19086
      GIM_Try, /*On fail goto*//*Label 278*/ GIMT_Encode4(19106), // Rule ID 2253 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (and:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_AND_B32_e64:{ *:[i16] } VSrc_b32:{ *:[i16] }:$src0, VSrc_b32:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2253,
        GIR_Done,
      // Label 278: @19106
      GIM_Try, /*On fail goto*//*Label 279*/ GIMT_Encode4(19162), // Rule ID 909 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
        // (and:{ *:[i16] } (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_AND_B16_t16_e64:{ *:[i16] } i32:{ *:[i32] }:$src0_modifiers, i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_B16_t16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 909,
        GIR_EraseRootFromParent_Done,
      // Label 279: @19162
      GIM_Try, /*On fail goto*//*Label 280*/ GIMT_Encode4(19185), // Rule ID 910 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (and:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_AND_B16_fake16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_B16_fake16_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 910,
        GIR_Done,
      // Label 280: @19185
      GIM_Reject,
    // Label 275: @19186
    GIM_Reject,
    // Label 266: @19187
    GIM_Try, /*On fail goto*//*Label 281*/ GIMT_Encode4(19347),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 282*/ GIMT_Encode4(19249), // Rule ID 8138 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src1, -1:{ *:[i32] }), i32:{ *:[i32] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8138,
        GIR_EraseRootFromParent_Done,
      // Label 282: @19249
      GIM_Try, /*On fail goto*//*Label 283*/ GIMT_Encode4(19300), // Rule ID 66 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i32] } i32:{ *:[i32] }:$src0, (xor:{ *:[i32] } i32:{ *:[i32] }:$src1, -1:{ *:[i32] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 66,
        GIR_EraseRootFromParent_Done,
      // Label 283: @19300
      GIM_Try, /*On fail goto*//*Label 284*/ GIMT_Encode4(19326), // Rule ID 54 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        // (and:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18646>>  =>  (S_AND_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_AND_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 54,
        GIR_Done,
      // Label 284: @19326
      GIM_Try, /*On fail goto*//*Label 285*/ GIMT_Encode4(19346), // Rule ID 831 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (and:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)  =>  (V_AND_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 831,
        GIR_Done,
      // Label 285: @19346
      GIM_Reject,
    // Label 281: @19347
    GIM_Reject,
    // Label 267: @19348
    GIM_Try, /*On fail goto*//*Label 286*/ GIMT_Encode4(19472),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_Try, /*On fail goto*//*Label 287*/ GIMT_Encode4(19410), // Rule ID 8139 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$src1, -1:{ *:[i64] }), i64:{ *:[i64] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8139,
        GIR_EraseRootFromParent_Done,
      // Label 287: @19410
      GIM_Try, /*On fail goto*//*Label 288*/ GIMT_Encode4(19453), // Rule ID 67 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (and:{ *:[i64] } i64:{ *:[i64] }:$src0, (xor:{ *:[i64] } i64:{ *:[i64] }:$src1, -1:{ *:[i64] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 67,
        GIR_EraseRootFromParent_Done,
      // Label 288: @19453
      GIM_Try, /*On fail goto*//*Label 289*/ GIMT_Encode4(19471), // Rule ID 55 //
        // (and:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_18646>>  =>  (S_AND_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_AND_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 55,
        GIR_Done,
      // Label 289: @19471
      GIM_Reject,
    // Label 286: @19472
    GIM_Reject,
    // Label 268: @19473
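    // v2s16 G_AND: rules 8212 and 1739 fold an all-ones xor operand into
    // S_ANDN2_B32 on the SGPR bank; rule 2256 mutates the VGPR case into
    // V_AND_B32_e64 with an implicit EXEC use.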
    GIM_Try, /*On fail goto*//*Label 290*/ GIMT_Encode4(19623),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s16,
      GIM_Try, /*On fail goto*//*Label 291*/ GIMT_Encode4(19543), // Rule ID 8212 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v2i16] } (xor:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, immAllOnesV:{ *:[v2i16] }), v2i16:{ *:[v2i16] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[v2i16] }:{ *:[i1] } SSrc_b32:{ *:[v2i16] }:$src0, SSrc_b32:{ *:[v2i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8212,
        GIR_EraseRootFromParent_Done,
      // Label 291: @19543
      GIM_Try, /*On fail goto*//*Label 292*/ GIMT_Encode4(19602), // Rule ID 1739 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, (xor:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, immAllOnesV:{ *:[v2i16] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B32:{ *:[v2i16] }:{ *:[i1] } SSrc_b32:{ *:[v2i16] }:$src0, SSrc_b32:{ *:[v2i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1739,
        GIR_EraseRootFromParent_Done,
      // Label 292: @19602
      GIM_Try, /*On fail goto*//*Label 293*/ GIMT_Encode4(19622), // Rule ID 2256 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (and:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, v2i16:{ *:[v2i16] }:$src1)  =>  (V_AND_B32_e64:{ *:[v2i16] } VSrc_b32:{ *:[v2i16] }:$src0, VSrc_b32:{ *:[v2i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2256,
        GIR_Done,
      // Label 293: @19622
      GIM_Reject,
    // Label 290: @19623
    GIM_Reject,
    // Label 269: @19624
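    // v2s32 G_AND, SGPR bank only: rules 8214 and 1741 fold an all-ones xor
    // operand into S_ANDN2_B64. There is no plain fallback in this group, so
    // anything else falls through to the rejects below.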
    GIM_Try, /*On fail goto*//*Label 294*/ GIMT_Encode4(19746),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s32,
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_Try, /*On fail goto*//*Label 295*/ GIMT_Encode4(19694), // Rule ID 8214 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s32,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v2i32] } (xor:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src1, immAllOnesV:{ *:[v2i32] }), v2i32:{ *:[v2i32] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[v2i32] }:{ *:[i1] } SSrc_b64:{ *:[v2i32] }:$src0, SSrc_b64:{ *:[v2i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8214,
        GIR_EraseRootFromParent_Done,
      // Label 295: @19694
      GIM_Try, /*On fail goto*//*Label 296*/ GIMT_Encode4(19745), // Rule ID 1741 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s32,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src0, (xor:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src1, immAllOnesV:{ *:[v2i32] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[v2i32] }:{ *:[i1] } SSrc_b64:{ *:[v2i32] }:$src0, SSrc_b64:{ *:[v2i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1741,
        GIR_EraseRootFromParent_Done,
      // Label 296: @19745
      GIM_Reject,
    // Label 294: @19746
    GIM_Reject,
    // Label 270: @19747
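    // v4s16 G_AND: the same S_ANDN2_B64 folds as the v2s32 group above
    // (rules 8213 and 1740), again restricted to the SReg_64 bank.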
    GIM_Try, /*On fail goto*//*Label 297*/ GIMT_Encode4(19869),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v4s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v4s16,
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18646),
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_Try, /*On fail goto*//*Label 298*/ GIMT_Encode4(19817), // Rule ID 8213 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v4s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v4s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v4i16] } (xor:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src1, immAllOnesV:{ *:[v4i16] }), v4i16:{ *:[v4i16] }:$src0)<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[v4i16] }:{ *:[i1] } SSrc_b64:{ *:[v4i16] }:$src0, SSrc_b64:{ *:[v4i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8213,
        GIR_EraseRootFromParent_Done,
      // Label 298: @19817
      GIM_Try, /*On fail goto*//*Label 299*/ GIMT_Encode4(19868), // Rule ID 1740 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v4s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v4s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (and:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src0, (xor:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src1, immAllOnesV:{ *:[v4i16] }))<<P:Predicate_anonymous_18646>>  =>  (S_ANDN2_B64:{ *:[v4i16] }:{ *:[i1] } SSrc_b64:{ *:[v4i16] }:$src0, SSrc_b64:{ *:[v4i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ANDN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1740,
        GIR_EraseRootFromParent_Done,
      // Label 299: @19868
      GIM_Reject,
    // Label 297: @19869
    GIM_Reject,
    // Label 271: @19870
    GIM_Reject,
    // Label 4: @19871
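    // Type switch for the G_OR patterns that follow: the jump table below is
    // indexed by the LLT of operand 0 over the half-open range [7, 17)
    // (GILLT_s1 through GILLT_v4s16); slots encoded as 0 have no rules and
    // presumably resolve to the default label, like any type outside the range.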
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(7), GIMT_Encode2(17), /*)*//*default:*//*Label 307*/ GIMT_Encode4(34394),
    /*GILLT_s1*//*Label 300*/ GIMT_Encode4(19922),
    /*GILLT_s16*//*Label 301*/ GIMT_Encode4(19985),
    /*GILLT_s32*//*Label 302*/ GIMT_Encode4(20199),
    /*GILLT_s64*//*Label 303*/ GIMT_Encode4(24808),
    /*GILLT_v2s16*//*Label 304*/ GIMT_Encode4(33997),
    /*GILLT_v2s32*//*Label 305*/ GIMT_Encode4(34148), GIMT_Encode4(0), GIMT_Encode4(0), GIMT_Encode4(0),
    /*GILLT_v4s16*//*Label 306*/ GIMT_Encode4(34271),
    // Label 300: @19922
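    // s1 G_OR: rule 7262 selects S_OR_B64 under isWave64 and rule 7269 selects
    // S_OR_B32 under isWave32, each with a dead implicit SCC def; the wave-size
    // gating reflects that these i1 values are likely wave-wide lane masks.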
    GIM_Try, /*On fail goto*//*Label 308*/ GIMT_Encode4(19984),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s1,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s1,
      GIM_Try, /*On fail goto*//*Label 309*/ GIMT_Encode4(19958), // Rule ID 7262 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        // (or:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_OR_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_OR_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7262,
        GIR_Done,
      // Label 309: @19958
      GIM_Try, /*On fail goto*//*Label 310*/ GIMT_Encode4(19983), // Rule ID 7269 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        // (or:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_OR_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_OR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7269,
        GIR_Done,
      // Label 310: @19983
      GIM_Reject,
    // Label 308: @19984
    GIM_Reject,
    // Label 301: @19985
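    // s16 G_OR: rules 8215 and 1742 fold (xor $src1, -1) on either side into
    // S_ORN2_B32 on SGPRs, rule 2254 mutates the plain VGPR case into
    // V_OR_B32_e64, and rules 911/912 select the GFX11+ 16-bit V_OR_B16 forms.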
    GIM_Try, /*On fail goto*//*Label 311*/ GIMT_Encode4(20198),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_Try, /*On fail goto*//*Label 312*/ GIMT_Encode4(20047), // Rule ID 8215 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i16] } (xor:{ *:[i16] } i16:{ *:[i16] }:$src1, -1:{ *:[i16] }), i16:{ *:[i16] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[i16] }:{ *:[i1] } SSrc_b32:{ *:[i16] }:$src0, SSrc_b32:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8215,
        GIR_EraseRootFromParent_Done,
      // Label 312: @20047
      GIM_Try, /*On fail goto*//*Label 313*/ GIMT_Encode4(20098), // Rule ID 1742 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s16,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i16] } i16:{ *:[i16] }:$src0, (xor:{ *:[i16] } i16:{ *:[i16] }:$src1, -1:{ *:[i16] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[i16] }:{ *:[i1] } SSrc_b32:{ *:[i16] }:$src0, SSrc_b32:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1742,
        GIR_EraseRootFromParent_Done,
      // Label 313: @20098
      GIM_Try, /*On fail goto*//*Label 314*/ GIMT_Encode4(20118), // Rule ID 2254 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (or:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_OR_B32_e64:{ *:[i16] } VSrc_b32:{ *:[i16] }:$src0, VSrc_b32:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2254,
        GIR_Done,
      // Label 314: @20118
      GIM_Try, /*On fail goto*//*Label 315*/ GIMT_Encode4(20174), // Rule ID 911 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
        // (or:{ *:[i16] } (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_OR_B16_t16_e64:{ *:[i16] } i32:{ *:[i32] }:$src0_modifiers, i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR_B16_t16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 911,
        GIR_EraseRootFromParent_Done,
      // Label 315: @20174
      GIM_Try, /*On fail goto*//*Label 316*/ GIMT_Encode4(20197), // Rule ID 912 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (or:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_OR_B16_fake16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR_B16_fake16_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 912,
        GIR_Done,
      // Label 316: @20197
      GIM_Reject,
    // Label 311: @20198
    GIM_Reject,
    // Label 302: @20199
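    // s32 G_OR: rules 10887-10893 and 7180 recognize the commuted forms of
    // ((x & y) | (~x & z)) on the VGPR bank and select V_BFI_B32_e64, copying
    // each source into a temp register first; shift-or and further V_BFI
    // variants follow later in this group.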
    GIM_Try, /*On fail goto*//*Label 317*/ GIMT_Encode4(24807),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 318*/ GIMT_Encode4(20348), // Rule ID 10893 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] }), i32:{ *:[i32] }:$z), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/3, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10893,
        GIR_EraseRootFromParent_Done,
      // Label 318: @20348
      GIM_Try, /*On fail goto*//*Label 319*/ GIMT_Encode4(20486), // Rule ID 10892 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/1, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] }), i32:{ *:[i32] }:$z), (and:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/3, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10892,
        GIR_EraseRootFromParent_Done,
      // Label 319: @20486
      GIM_Try, /*On fail goto*//*Label 320*/ GIMT_Encode4(20624), // Rule ID 10891 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] })), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/3, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10891,
        GIR_EraseRootFromParent_Done,
      // Label 320: @20624
      GIM_Try, /*On fail goto*//*Label 321*/ GIMT_Encode4(20762), // Rule ID 10890 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/1, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] })), (and:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/3, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10890,
        GIR_EraseRootFromParent_Done,
      // Label 321: @20762
      GIM_Try, /*On fail goto*//*Label 322*/ GIMT_Encode4(20900), // Rule ID 10887 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$x), (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] }), i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10887,
        GIR_EraseRootFromParent_Done,
      // Label 322: @20900
      GIM_Try, /*On fail goto*//*Label 323*/ GIMT_Encode4(21038), // Rule ID 10889 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$y), (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] }), i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10889,
        GIR_EraseRootFromParent_Done,
      // Label 323: @21038
      GIM_Try, /*On fail goto*//*Label 324*/ GIMT_Encode4(21176), // Rule ID 7180 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$x), (and:{ *:[i32] } i32:{ *:[i32] }:$z, (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] })))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7180,
        GIR_EraseRootFromParent_Done,
      // Label 324: @21176
      GIM_Try, /*On fail goto*//*Label 325*/ GIMT_Encode4(21314), // Rule ID 10888 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$y), (and:{ *:[i32] } i32:{ *:[i32] }:$z, (xor:{ *:[i32] } i32:{ *:[i32] }:$x, -1:{ *:[i32] })))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10888,
        GIR_EraseRootFromParent_Done,
      // Label 325: @21314
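      // Rules 2320 and 8228 (GFX9+): fold (or (shl x, (and y, mask)), z) and
      // its commuted form into V_LSHL_OR_B32_e64, with the shift-amount mask
      // validated by the csh_mask_32 C++ predicate on the inner G_AND.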
      GIM_Try, /*On fail goto*//*Label 326*/ GIMT_Encode4(21407), // Rule ID 2320 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:5:x
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordNamedOperand, /*MI*/2, /*Op*/1, /*StoreIdx*/1, // Name : pred:5:y
        GIM_RecordInsn, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        // MIs[3] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/2, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:5:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:5:x, (and:{ *:[i32] } i32:{ *:[i32] }:$src1:$pred:5:y, (imm:{ *:[i32] }))<<P:Predicate_csh_mask_32>>)<<P:Predicate_anonymous_24876>>, i32:{ *:[i32] }:$src2:$pred:5:z)<<P:5:Predicate_anonymous_24886>>  =>  (V_LSHL_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/2, /*OpIdx*/1, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2320,
        GIR_EraseRootFromParent_Done,
      // Label 326: @21407
      GIM_Try, /*On fail goto*//*Label 327*/ GIMT_Encode4(21500), // Rule ID 8228 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:5:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:5:x
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordNamedOperand, /*MI*/2, /*Op*/1, /*StoreIdx*/1, // Name : pred:5:y
        GIM_RecordInsn, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_CONSTANT),
        // MIs[3] Operand 1
        // No operand predicates
        GIM_CheckCxxInsnPredicate, /*MI*/2, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_csh_mask_32),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:5:z, (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:5:x, (and:{ *:[i32] } i32:{ *:[i32] }:$src1:$pred:5:y, (imm:{ *:[i32] }))<<P:Predicate_csh_mask_32>>)<<P:Predicate_anonymous_24876>>)<<P:5:Predicate_anonymous_24886>>  =>  (V_LSHL_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/2, /*OpIdx*/1, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8228,
        GIR_EraseRootFromParent_Done,
      // Label 327: @21500
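      // Rules 10938 and 10939: match (or (and (or x, z), y), (and x, z)) (and
      // the swapped inner AND) and rewrite it to V_BFI_B32_e64 whose first
      // operand is a freshly built V_XOR_B32_e64 of copies of x and y.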
      GIM_Try, /*On fail goto*//*Label 328*/ GIMT_Encode4(21673), // Rule ID 10938 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$y), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10938,
        GIR_EraseRootFromParent_Done,
      // Label 328: @21673
      GIM_Try, /*On fail goto*//*Label 329*/ GIMT_Encode4(21846), // Rule ID 10939 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$y), (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10939,
        GIR_EraseRootFromParent_Done,
      // Label 329: @21846
      GIM_Try, /*On fail goto*//*Label 330*/ GIMT_Encode4(22019), // Rule ID 10940 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$y), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10940,
        GIR_EraseRootFromParent_Done,
      // Label 330: @22019
      GIM_Try, /*On fail goto*//*Label 331*/ GIMT_Encode4(22192), // Rule ID 10941 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$y), (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10941,
        GIR_EraseRootFromParent_Done,
      // Label 331: @22192
      GIM_Try, /*On fail goto*//*Label 332*/ GIMT_Encode4(22365), // Rule ID 10934 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z)), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10934,
        GIR_EraseRootFromParent_Done,
      // Label 332: @22365
      GIM_Try, /*On fail goto*//*Label 333*/ GIMT_Encode4(22538), // Rule ID 10935 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z)), (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10935,
        GIR_EraseRootFromParent_Done,
      // Label 333: @22538
      GIM_Try, /*On fail goto*//*Label 334*/ GIMT_Encode4(22711), // Rule ID 10936 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x)), (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10936,
        GIR_EraseRootFromParent_Done,
      // Label 334: @22711
      GIM_Try, /*On fail goto*//*Label 335*/ GIMT_Encode4(22884), // Rule ID 10937 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x)), (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10937,
        GIR_EraseRootFromParent_Done,
      // Label 335: @22884
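      // Note: From here on the root G_OR has its operands swapped relative to the rules above:
      // the plain (and x, z) is matched as MIs[1] (operand 1 of the root) and the G_AND that
      // contains the recorded G_OR is matched as MIs[2] (operand 2). The emitted
      // V_BFI_B32_e64 / V_XOR_B32_e64 sequence is unchanged.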
      GIM_Try, /*On fail goto*//*Label 336*/ GIMT_Encode4(23057), // Rule ID 10928 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10928,
        GIR_EraseRootFromParent_Done,
      // Label 336: @23057
      GIM_Try, /*On fail goto*//*Label 337*/ GIMT_Encode4(23230), // Rule ID 10929 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10929,
        GIR_EraseRootFromParent_Done,
      // Label 337: @23230
      GIM_Try, /*On fail goto*//*Label 338*/ GIMT_Encode4(23403), // Rule ID 10932 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10932,
        GIR_EraseRootFromParent_Done,
      // Label 338: @23403
      GIM_Try, /*On fail goto*//*Label 339*/ GIMT_Encode4(23576), // Rule ID 10933 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), (and:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$y))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10933,
        GIR_EraseRootFromParent_Done,
      // Label 339: @23576
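      // Note: Rule 7406 and Rules 10927, 10930 and 10931 below place the recorded G_OR as the
      // second operand of the right-hand G_AND, i.e. (x & z) | (y & (x | z)), so their
      // renderers read y from MIs[2] operand 1 rather than operand 2.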
      GIM_Try, /*On fail goto*//*Label 340*/ GIMT_Encode4(23749), // Rule ID 7406 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z)))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7406,
        GIR_EraseRootFromParent_Done,
      // Label 340: @23749
      GIM_Try, /*On fail goto*//*Label 341*/ GIMT_Encode4(23922), // Rule ID 10927 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z), (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x)))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10927,
        GIR_EraseRootFromParent_Done,
      // Label 341: @23922
      GIM_Try, /*On fail goto*//*Label 342*/ GIMT_Encode4(24095), // Rule ID 10930 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$x, i32:{ *:[i32] }:$z)))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10930,
        GIR_EraseRootFromParent_Done,
      // Label 342: @24095
      GIM_Try, /*On fail goto*//*Label 343*/ GIMT_Encode4(24268), // Rule ID 10931 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x), (and:{ *:[i32] } i32:{ *:[i32] }:$y, (or:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$x)))<<P:Predicate_anonymous_24014>>  =>  (V_BFI_B32_e64:{ *:[i32] } (V_XOR_B32_e64:{ *:[i16] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] })), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/4,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10931,
        GIR_EraseRootFromParent_Done,
      // Label 343: @24268
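      // Note (annotation, not generated): rules 8140 and 68 match a scalar
      // or-with-complement, (x ^ -1) | y in either operand order, and select
      // S_ORN2_B32 on the SReg_32 bank, marking the implicit SCC def dead
      // because only the i32 result of the pattern is bound.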
      GIM_Try, /*On fail goto*//*Label 344*/ GIMT_Encode4(24319), // Rule ID 8140 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src1, -1:{ *:[i32] }), i32:{ *:[i32] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8140,
        GIR_EraseRootFromParent_Done,
      // Label 344: @24319
      GIM_Try, /*On fail goto*//*Label 345*/ GIMT_Encode4(24370), // Rule ID 68 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src0, (xor:{ *:[i32] } i32:{ *:[i32] }:$src1, -1:{ *:[i32] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 68,
        GIR_EraseRootFromParent_Done,
      // Label 345: @24370
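      // Note (annotation, not generated): the GFX9+ rules that follow fold a
      // single-use inner op into the VALU three-operand forms:
      //   (x & y)  | z -> V_AND_OR_B32_e64
      //   (x | y)  | z -> V_OR3_B32_e64
      //   (x << y) | z -> V_LSHL_OR_B32_e64
      // with commuted variants taking the inner op from either side of the or.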
      GIM_Try, /*On fail goto*//*Label 346*/ GIMT_Encode4(24435), // Rule ID 2321 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:6:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:6:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:6:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24888),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:6:x, i32:{ *:[i32] }:$src1:$pred:6:y)<<P:Predicate_anonymous_24889>>, i32:{ *:[i32] }:$src2:$pred:6:z)<<P:6:Predicate_anonymous_24888>>  =>  (V_AND_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2321,
        GIR_EraseRootFromParent_Done,
      // Label 346: @24435
      GIM_Try, /*On fail goto*//*Label 347*/ GIMT_Encode4(24500), // Rule ID 2322 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:7:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:7:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:7:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24891),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:7:x, i32:{ *:[i32] }:$src1:$pred:7:y)<<P:Predicate_anonymous_24892>>, i32:{ *:[i32] }:$src2:$pred:7:z)<<P:7:Predicate_anonymous_24891>>  =>  (V_OR3_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR3_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2322,
        GIR_EraseRootFromParent_Done,
      // Label 347: @24500
      GIM_Try, /*On fail goto*//*Label 348*/ GIMT_Encode4(24565), // Rule ID 2319 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:5:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:5:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:5:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:5:x, i32:{ *:[i32] }:$src1:$pred:5:y)<<P:Predicate_anonymous_24876>>, i32:{ *:[i32] }:$src2:$pred:5:z)<<P:5:Predicate_anonymous_24886>>  =>  (V_LSHL_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2319,
        GIR_EraseRootFromParent_Done,
      // Label 348: @24565
      GIM_Try, /*On fail goto*//*Label 349*/ GIMT_Encode4(24630), // Rule ID 8229 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:6:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:6:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:6:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24888),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:6:z, (and:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:6:x, i32:{ *:[i32] }:$src1:$pred:6:y)<<P:Predicate_anonymous_24889>>)<<P:6:Predicate_anonymous_24888>>  =>  (V_AND_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_AND_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8229,
        GIR_EraseRootFromParent_Done,
      // Label 349: @24630
      GIM_Try, /*On fail goto*//*Label 350*/ GIMT_Encode4(24695), // Rule ID 8230 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:7:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:7:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:7:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24891),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:7:z, (or:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:7:x, i32:{ *:[i32] }:$src1:$pred:7:y)<<P:Predicate_anonymous_24892>>)<<P:7:Predicate_anonymous_24891>>  =>  (V_OR3_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR3_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8230,
        GIR_EraseRootFromParent_Done,
      // Label 350: @24695
      GIM_Try, /*On fail goto*//*Label 351*/ GIMT_Encode4(24760), // Rule ID 8227 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX9Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:5:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_SHL),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:5:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:5:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24886),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:5:z, (shl:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:5:x, i32:{ *:[i32] }:$src1:$pred:5:y)<<P:Predicate_anonymous_24876>>)<<P:5:Predicate_anonymous_24886>>  =>  (V_LSHL_OR_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_LSHL_OR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8227,
        GIR_EraseRootFromParent_Done,
      // Label 351: @24760
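      // Note (annotation, not generated): fallback rules 56 and 832 mutate a
      // plain i32 G_OR in place into S_OR_B32 (adding a dead implicit SCC def)
      // on the SGPR bank, or into V_OR_B32_e64 (adding an implicit EXEC use)
      // on the VGPR bank.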
      GIM_Try, /*On fail goto*//*Label 352*/ GIMT_Encode4(24786), // Rule ID 56 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18647>>  =>  (S_OR_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_OR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 56,
        GIR_Done,
      // Label 352: @24786
      GIM_Try, /*On fail goto*//*Label 353*/ GIMT_Encode4(24806), // Rule ID 832 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (or:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)  =>  (V_OR_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 832,
        GIR_Done,
      // Label 353: @24806
      GIM_Reject,
    // Label 317: @24807
    GIM_Reject,
    // Label 303: @24808
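    // Note (annotation, not generated): 64-bit G_OR, both sources s64. The
    // bitfield-select rules below (10901, 10900, 10899, ...) match
    // (~x & z) | (x & y) and its commuted forms, emit one V_BFI_B32_e64 per
    // 32-bit half via sub0/sub1 subregister copies, and recombine the halves
    // into a VReg_64 with REG_SEQUENCE.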
    GIM_Try, /*On fail goto*//*Label 354*/ GIMT_Encode4(33996),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
      GIM_Try, /*On fail goto*//*Label 355*/ GIMT_Encode4(25134), // Rule ID 10901 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] }), i64:{ *:[i64] }:$z), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/3, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/3, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10901,
        GIR_EraseRootFromParent_Done,
      // Label 355: @25134
      GIM_Try, /*On fail goto*//*Label 356*/ GIMT_Encode4(25449), // Rule ID 10900 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/1, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] }), i64:{ *:[i64] }:$z), (and:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/3, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/3, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10900,
        GIR_EraseRootFromParent_Done,
      // Label 356: @25449
      GIM_Try, /*On fail goto*//*Label 357*/ GIMT_Encode4(25764), // Rule ID 10899 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] })), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/3, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/3, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10899,
        GIR_EraseRootFromParent_Done,
      // Label 357: @25764
      GIM_Try, /*On fail goto*//*Label 358*/ GIMT_Encode4(26079), // Rule ID 10898 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/2, /*Op*/2, uint8_t(-1),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/3, /*Op*/1, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] })), (and:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/3, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/3, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10898,
        GIR_EraseRootFromParent_Done,
      // Label 358: @26079
      GIM_Try, /*On fail goto*//*Label 359*/ GIMT_Encode4(26394), // Rule ID 10895 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$x), (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] }), i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10895,
        GIR_EraseRootFromParent_Done,
      // Label 359: @26394
      GIM_Try, /*On fail goto*//*Label 360*/ GIMT_Encode4(26709), // Rule ID 10897 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$y), (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] }), i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
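        // Sketch (illustrative only, not generator output; vreg names below are
        // invented): the renderer steps that follow expand the matched 64-bit
        // pattern (x & y) | (~x & z) one 32-bit half at a time. Each s64 input is
        // narrowed with a subregister COPY (SubRegIdx 3 pairs with the sub0 half
        // and 11 with the sub1 half in the pattern comment above), a
        // V_BFI_B32_e64 is emitted per half, and the two halves are rejoined
        // with REG_SEQUENCE. Roughly:
        //   %x0 = COPY %x.sub0   %y0 = COPY %y.sub0   %z0 = COPY %z.sub0
        //   %x1 = COPY %x.sub1   %y1 = COPY %y.sub1   %z1 = COPY %z.sub1
        //   %lo = V_BFI_B32_e64 %x0, %y0, %z0
        //   %hi = V_BFI_B32_e64 %x1, %y1, %z1
        //   %dst:vreg_64 = REG_SEQUENCE %lo, %subreg.sub0, %hi, %subreg.sub1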
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10897,
        GIR_EraseRootFromParent_Done,
      // Label 360: @26709
      GIM_Try, /*On fail goto*//*Label 361*/ GIMT_Encode4(27024), // Rule ID 7182 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$x), (and:{ *:[i64] } i64:{ *:[i64] }:$z, (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] })))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 7182,
        GIR_EraseRootFromParent_Done,
      // Label 361: @27024
      GIM_Try, /*On fail goto*//*Label 362*/ GIMT_Encode4(27339), // Rule ID 10896 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/3, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckConstantInt8, /*MI*/3, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$y), (and:{ *:[i64] } i64:{ *:[i64] }:$z, (xor:{ *:[i64] } i64:{ *:[i64] }:$x, -1:{ *:[i64] })))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10896,
        GIR_EraseRootFromParent_Done,
      // Label 362: @27339
      GIM_Try, /*On fail goto*//*Label 363*/ GIMT_Encode4(27747), // Rule ID 10953 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$y), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
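        // Sketch (illustrative only; vreg names are invented): this rule and the
        // commuted variants that follow select ((x | z) & y) | (x & z) into a
        // per-half V_XOR_B32_e64 feeding V_BFI_B32_e64, again splitting the s64
        // values with subregister COPYs and rejoining the two 32-bit results
        // with REG_SEQUENCE. For one half this is roughly:
        //   %t  = V_XOR_B32_e64 %x_half, %y_half
        //   %lo = V_BFI_B32_e64 %t, %z_half, %y_half
        // i.e. bfi(x ^ y, z, y), which with bfi(a, b, c) = (a & b) | (~a & c)
        // (the semantics implied by the preceding BFI rules) is equivalent to
        // ((x | z) & y) | (x & z) for each 32-bit half.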
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10953,
        GIR_EraseRootFromParent_Done,
      // Label 363: @27747
      GIM_Try, /*On fail goto*//*Label 364*/ GIMT_Encode4(28155), // Rule ID 10954 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$y), (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10954,
        GIR_EraseRootFromParent_Done,
      // Label 364: @28155
      GIM_Try, /*On fail goto*//*Label 365*/ GIMT_Encode4(28563), // Rule ID 10955 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$y), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10955,
        GIR_EraseRootFromParent_Done,
      // Label 365: @28563
      GIM_Try, /*On fail goto*//*Label 366*/ GIMT_Encode4(28971), // Rule ID 10956 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$y), (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10956,
        GIR_EraseRootFromParent_Done,
      // Label 366: @28971
      GIM_Try, /*On fail goto*//*Label 367*/ GIMT_Encode4(29379), // Rule ID 10949 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z)), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10949,
        GIR_EraseRootFromParent_Done,
      // Label 367: @29379
      GIM_Try, /*On fail goto*//*Label 368*/ GIMT_Encode4(29787), // Rule ID 10950 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z)), (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10950,
        GIR_EraseRootFromParent_Done,
      // Label 368: @29787
      GIM_Try, /*On fail goto*//*Label 369*/ GIMT_Encode4(30195), // Rule ID 10951 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x)), (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10951,
        GIR_EraseRootFromParent_Done,
      // Label 369: @30195
      GIM_Try, /*On fail goto*//*Label 370*/ GIMT_Encode4(30603), // Rule ID 10952 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/0, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_AND),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/2, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x)), (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10952,
        GIR_EraseRootFromParent_Done,
      // Label 370: @30603
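      // Rule IDs 10943, 10944 and 10947 below cover the commuted arrangement in which the
      // plain (and x, z) operand of the root G_OR comes first and the (and (or x, z), y)
      // operand second; the emitted per-half V_XOR_B32_e64 + V_BFI_B32_e64 sequence and the
      // final REG_SEQUENCE match the rules above, as the per-rule pattern comments show.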
      GIM_Try, /*On fail goto*//*Label 371*/ GIMT_Encode4(31011), // Rule ID 10943 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10943,
        GIR_EraseRootFromParent_Done,
      // Label 371: @31011
      GIM_Try, /*On fail goto*//*Label 372*/ GIMT_Encode4(31419), // Rule ID 10944 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10944,
        GIR_EraseRootFromParent_Done,
      // Label 372: @31419
      GIM_Try, /*On fail goto*//*Label 373*/ GIMT_Encode4(31827), // Rule ID 10947 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10947,
        GIR_EraseRootFromParent_Done,
      // Label 373: @31827
      GIM_Try, /*On fail goto*//*Label 374*/ GIMT_Encode4(32235), // Rule ID 10948 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/1, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), (and:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$y))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10948,
        GIR_EraseRootFromParent_Done,
      // Label 374: @32235
      GIM_Try, /*On fail goto*//*Label 375*/ GIMT_Encode4(32643), // Rule ID 7407 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z)))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 7407,
        GIR_EraseRootFromParent_Done,
      // Label 375: @32643
      GIM_Try, /*On fail goto*//*Label 376*/ GIMT_Encode4(33051), // Rule ID 10942 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z), (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x)))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10942,
        GIR_EraseRootFromParent_Done,
      // Label 376: @33051
      GIM_Try, /*On fail goto*//*Label 377*/ GIMT_Encode4(33459), // Rule ID 10945 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/2,
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$x, i64:{ *:[i64] }:$z)))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10945,
        GIR_EraseRootFromParent_Done,
      // Label 377: @33459
      GIM_Try, /*On fail goto*//*Label 378*/ GIMT_Encode4(33867), // Rule ID 10946 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/0, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/3, /*MI*/2, /*OpIdx*/2, // MIs[3]
        GIM_CheckOpcode, /*MI*/3, GIMT_Encode2(TargetOpcode::G_OR),
        // MIs[3] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/1, /*OtherMI*/1, /*OtherOpIdx*/1,
        // MIs[3] x
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/3, /*OpIdx*/2, /*OtherMI*/1, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24014),
        GIM_CheckIsSafeToFold, /*NumInsns*/3,
        // (or:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x), (and:{ *:[i64] } i64:{ *:[i64] }:$y, (or:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$x)))<<P:Predicate_anonymous_24014>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (V_XOR_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/8, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/9, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/10, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/11, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/12, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/12, /*TempRegID*/11, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/12, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/12, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/11, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/11, /*TempRegID*/10, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/11, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/11, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/10, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/10, /*TempRegID*/9, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/10, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/10, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/9, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/9, /*TempRegID*/8, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/9, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/9, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/8,
        GIR_AddSimpleTempRegister, /*InsnID*/8, /*TempRegID*/9,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/8,
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/7,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/10,
        GIR_AddSimpleTempRegister, /*InsnID*/7, /*TempRegID*/11,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/7,
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/5, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/5, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/2, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/4,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/5,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/6,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10946,
        GIR_EraseRootFromParent_Done,
      // Label 378: @33867
      GIM_Try, /*On fail goto*//*Label 379*/ GIMT_Encode4(33918), // Rule ID 8141 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$src1, -1:{ *:[i64] }), i64:{ *:[i64] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8141,
        GIR_EraseRootFromParent_Done,
      // Label 379: @33918
      GIM_Try, /*On fail goto*//*Label 380*/ GIMT_Encode4(33969), // Rule ID 69 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (or:{ *:[i64] } i64:{ *:[i64] }:$src0, (xor:{ *:[i64] } i64:{ *:[i64] }:$src1, -1:{ *:[i64] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 69,
        GIR_EraseRootFromParent_Done,
      // Label 380: @33969
      GIM_Try, /*On fail goto*//*Label 381*/ GIMT_Encode4(33995), // Rule ID 57 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        // (or:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_anonymous_18647>>  =>  (S_OR_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_OR_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 57,
        GIR_Done,
      // Label 381: @33995
      GIM_Reject,
    // Label 354: @33996
    GIM_Reject,
    // Label 304: @33997
    GIM_Try, /*On fail goto*//*Label 382*/ GIMT_Encode4(34147),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s16,
      GIM_Try, /*On fail goto*//*Label 383*/ GIMT_Encode4(34067), // Rule ID 8216 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v2i16] } (xor:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, immAllOnesV:{ *:[v2i16] }), v2i16:{ *:[v2i16] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[v2i16] }:{ *:[i1] } SSrc_b32:{ *:[v2i16] }:$src0, SSrc_b32:{ *:[v2i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8216,
        GIR_EraseRootFromParent_Done,
      // Label 383: @34067
      GIM_Try, /*On fail goto*//*Label 384*/ GIMT_Encode4(34126), // Rule ID 1743 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, (xor:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src1, immAllOnesV:{ *:[v2i16] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B32:{ *:[v2i16] }:{ *:[i1] } SSrc_b32:{ *:[v2i16] }:$src0, SSrc_b32:{ *:[v2i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1743,
        GIR_EraseRootFromParent_Done,
      // Label 384: @34126
      GIM_Try, /*On fail goto*//*Label 385*/ GIMT_Encode4(34146), // Rule ID 2257 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (or:{ *:[v2i16] } v2i16:{ *:[v2i16] }:$src0, v2i16:{ *:[v2i16] }:$src1)  =>  (V_OR_B32_e64:{ *:[v2i16] } VSrc_b32:{ *:[v2i16] }:$src0, VSrc_b32:{ *:[v2i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_OR_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2257,
        GIR_Done,
      // Label 385: @34146
      GIM_Reject,
    // Label 382: @34147
    GIM_Reject,
    // Label 305: @34148
    GIM_Try, /*On fail goto*//*Label 386*/ GIMT_Encode4(34270),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v2s32,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v2s32,
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_Try, /*On fail goto*//*Label 387*/ GIMT_Encode4(34218), // Rule ID 8218 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s32,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v2i32] } (xor:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src1, immAllOnesV:{ *:[v2i32] }), v2i32:{ *:[v2i32] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[v2i32] }:{ *:[i1] } SSrc_b64:{ *:[v2i32] }:$src0, SSrc_b64:{ *:[v2i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8218,
        GIR_EraseRootFromParent_Done,
      // Label 387: @34218
      GIM_Try, /*On fail goto*//*Label 388*/ GIMT_Encode4(34269), // Rule ID 1745 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v2s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v2s32,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src0, (xor:{ *:[v2i32] } v2i32:{ *:[v2i32] }:$src1, immAllOnesV:{ *:[v2i32] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[v2i32] }:{ *:[i1] } SSrc_b64:{ *:[v2i32] }:$src0, SSrc_b64:{ *:[v2i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1745,
        GIR_EraseRootFromParent_Done,
      // Label 388: @34269
      GIM_Reject,
    // Label 386: @34270
    GIM_Reject,
    // Label 306: @34271
    GIM_Try, /*On fail goto*//*Label 389*/ GIMT_Encode4(34393),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_v4s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_v4s16,
      GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18647),
      GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
      GIM_Try, /*On fail goto*//*Label 390*/ GIMT_Encode4(34341), // Rule ID 8217 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v4s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v4s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v4i16] } (xor:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src1, immAllOnesV:{ *:[v4i16] }), v4i16:{ *:[v4i16] }:$src0)<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[v4i16] }:{ *:[i1] } SSrc_b64:{ *:[v4i16] }:$src0, SSrc_b64:{ *:[v4i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/2, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8217,
        GIR_EraseRootFromParent_Done,
      // Label 390: @34341
      GIM_Try, /*On fail goto*//*Label 391*/ GIMT_Encode4(34392), // Rule ID 1744 //
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_v4s16,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_v4s16,
        GIM_RecordInsn, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcodeIsEither, /*MI*/2, GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR), GIMT_Encode2(TargetOpcode::G_BUILD_VECTOR_TRUNC),
        GIM_CheckIsBuildVectorAllOnes, /*MI*/2,
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (or:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src0, (xor:{ *:[v4i16] } v4i16:{ *:[v4i16] }:$src1, immAllOnesV:{ *:[v4i16] }))<<P:Predicate_anonymous_18647>>  =>  (S_ORN2_B64:{ *:[v4i16] }:{ *:[i1] } SSrc_b64:{ *:[v4i16] }:$src0, SSrc_b64:{ *:[v4i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_ORN2_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 1744,
        GIR_EraseRootFromParent_Done,
      // Label 391: @34392
      GIM_Reject,
    // Label 389: @34393
    GIM_Reject,
    // Label 307: @34394
    GIM_Reject,
    // Label 5: @34395
    GIM_SwitchType, /*MI*/0, /*Op*/0, /*[*/GIMT_Encode2(7), GIMT_Encode2(12), /*)*//*default:*//*Label 397*/ GIMT_Encode4(38818),
    /*GILLT_s1*//*Label 392*/ GIMT_Encode4(34426),
    /*GILLT_s16*//*Label 393*/ GIMT_Encode4(34489),
    /*GILLT_s32*//*Label 394*/ GIMT_Encode4(34601),
    /*GILLT_s64*//*Label 395*/ GIMT_Encode4(36158),
    /*GILLT_v2s16*//*Label 396*/ GIMT_Encode4(38791),
    // Label 392: @34426
    GIM_Try, /*On fail goto*//*Label 398*/ GIMT_Encode4(34488),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s1,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s1,
      GIM_Try, /*On fail goto*//*Label 399*/ GIMT_Encode4(34462), // Rule ID 7263 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave64),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        // (xor:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B64:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B64),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7263,
        GIR_Done,
      // Label 399: @34462
      GIM_Try, /*On fail goto*//*Label 400*/ GIMT_Encode4(34487), // Rule ID 7270 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isWave32),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        // (xor:{ *:[i1] } i1:{ *:[i1] }:$src0, i1:{ *:[i1] }:$src1)  =>  (S_XOR_B32:{ *:[i1] }:{ *:[i1] } ?:{ *:[i1] }:$src0, ?:{ *:[i1] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7270,
        GIR_Done,
      // Label 400: @34487
      GIM_Reject,
    // Label 398: @34488
    GIM_Reject,
    // Label 393: @34489
    GIM_Try, /*On fail goto*//*Label 401*/ GIMT_Encode4(34600),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s16,
      GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s16,
      GIM_Try, /*On fail goto*//*Label 402*/ GIMT_Encode4(34520), // Rule ID 2255 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (xor:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_XOR_B32_e64:{ *:[i16] } VSrc_b32:{ *:[i16] }:$src0, VSrc_b32:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2255,
        GIR_Done,
      // Label 402: @34520
      GIM_Try, /*On fail goto*//*Label 403*/ GIMT_Encode4(34576), // Rule ID 913 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_16RegClassID),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/1, /*Renderer*/GIMT_Encode2(0), GIMT_Encode2(GICP_gi_vop3opselmods),
        GIM_CheckComplexPattern, /*MI*/0, /*Op*/2, /*Renderer*/GIMT_Encode2(1), GIMT_Encode2(GICP_gi_vop3opselmods),
        // (xor:{ *:[i16] } (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src0_modifiers), (VOP3OpSelMods:{ *:[i16] } i16:{ *:[i16] }:$src1, i32:{ *:[i32] }:$src1_modifiers))  =>  (V_XOR_B16_t16_e64:{ *:[i16] } i32:{ *:[i32] }:$src0_modifiers, i16:{ *:[i16] }:$src0, i32:{ *:[i32] }:$src1_modifiers, i16:{ *:[i16] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B16_t16_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/1, // src0_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(0), /*SubOperand*/0, // src0
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/1, // src1_modifiers
        GIR_ComplexSubOperandRenderer, /*InsnID*/0, /*RendererID*/GIMT_Encode2(1), /*SubOperand*/0, // src1
        GIR_AddImm8, /*InsnID*/0, /*Imm*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 913,
        GIR_EraseRootFromParent_Done,
      // Label 403: @34576
      GIM_Try, /*On fail goto*//*Label 404*/ GIMT_Encode4(34599), // Rule ID 914 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX11Plus),
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (xor:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)  =>  (V_XOR_B16_fake16_e64:{ *:[i16] } i16:{ *:[i16] }:$src0, i16:{ *:[i16] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B16_fake16_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 914,
        GIR_Done,
      // Label 404: @34599
      GIM_Reject,
    // Label 401: @34600
    GIM_Reject,
    // Label 394: @34601
    GIM_Try, /*On fail goto*//*Label 405*/ GIMT_Encode4(36157),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s32,
      GIM_Try, /*On fail goto*//*Label 406*/ GIMT_Encode4(34667), // Rule ID 62 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_and_oneuse>>, -1:{ *:[i32] })<<P:Predicate_anonymous_18616>>  =>  (S_NAND_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NAND_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 62,
        GIR_EraseRootFromParent_Done,
      // Label 406: @34667
      GIM_Try, /*On fail goto*//*Label 407*/ GIMT_Encode4(34725), // Rule ID 64 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (or:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_or_oneuse>>, -1:{ *:[i32] })<<P:Predicate_anonymous_18616>>  =>  (S_NOR_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOR_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 64,
        GIR_EraseRootFromParent_Done,
      // Label 407: @34725
      GIM_Try, /*On fail goto*//*Label 408*/ GIMT_Encode4(34783), // Rule ID 60 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_xor_oneuse>>, -1:{ *:[i32] })<<P:Predicate_anonymous_18616>>  =>  (S_XNOR_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_XNOR_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 60,
        GIR_EraseRootFromParent_Done,
      // Label 408: @34783
      GIM_Try, /*On fail goto*//*Label 409*/ GIMT_Encode4(34905), // Rule ID 10907 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$z)<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10907,
        GIR_EraseRootFromParent_Done,
      // Label 409: @34905
      GIM_Try, /*On fail goto*//*Label 410*/ GIMT_Encode4(35027), // Rule ID 10908 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$y), i32:{ *:[i32] }:$x), i32:{ *:[i32] }:$z)<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10908,
        GIR_EraseRootFromParent_Done,
      // Label 410: @35027
      GIM_Try, /*On fail goto*//*Label 411*/ GIMT_Encode4(35149), // Rule ID 10905 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, (xor:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$z)), i32:{ *:[i32] }:$z)<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10905,
        GIR_EraseRootFromParent_Done,
      // Label 411: @35149
      GIM_Try, /*On fail goto*//*Label 412*/ GIMT_Encode4(35271), // Rule ID 10906 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } (and:{ *:[i32] } i32:{ *:[i32] }:$x, (xor:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$y)), i32:{ *:[i32] }:$z)<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10906,
        GIR_EraseRootFromParent_Done,
      // Label 412: @35271
      GIM_Try, /*On fail goto*//*Label 413*/ GIMT_Encode4(35392), // Rule ID 10904 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/1, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$z, (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$y), i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/0, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10904,
        GIR_EraseRootFromParent_Done,
      // Label 413: @35392
      GIM_Try, /*On fail goto*//*Label 414*/ GIMT_Encode4(35513), // Rule ID 10903 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/2, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$z, (and:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$z), i32:{ *:[i32] }:$x))<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/0, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/2, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10903,
        GIR_EraseRootFromParent_Done,
      // Label 414: @35513
      GIM_Try, /*On fail goto*//*Label 415*/ GIMT_Encode4(35634), // Rule ID 10902 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s32,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/1, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$z, (and:{ *:[i32] } i32:{ *:[i32] }:$x, (xor:{ *:[i32] } i32:{ *:[i32] }:$z, i32:{ *:[i32] }:$y)))<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/0, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/2, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 10902,
        GIR_EraseRootFromParent_Done,
      // Label 415: @35634
      GIM_Try, /*On fail goto*//*Label 416*/ GIMT_Encode4(35755), // Rule ID 7183 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s32,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/2, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$z, (and:{ *:[i32] } i32:{ *:[i32] }:$x, (xor:{ *:[i32] } i32:{ *:[i32] }:$y, i32:{ *:[i32] }:$z)))<<P:Predicate_anonymous_24016>>  =>  (V_BFI_B32_e64:{ *:[i32] } (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$x, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$y, VGPR_32:{ *:[i32] }), (COPY_TO_REGCLASS:{ *:[i16] } VSrc_b32:{ *:[i32] }:$z, VGPR_32:{ *:[i32] }))
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s16,
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/3, /*OldInsnID*/0, /*OpIdx*/1, // z
        GIR_ConstrainSelectedInstOperands, /*InsnID*/3,
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/2, /*OldInsnID*/2, /*OpIdx*/1, // y
        GIR_ConstrainSelectedInstOperands, /*InsnID*/2,
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_Copy, /*NewInsnID*/1, /*OldInsnID*/1, /*OpIdx*/1, // x
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/2,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7183,
        GIR_EraseRootFromParent_Done,
      // Label 416: @35755
      GIM_Try, /*On fail goto*//*Label 417*/ GIMT_Encode4(35787), // Rule ID 0 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, -1:{ *:[i32] })<<P:Predicate_anonymous_18616>>  =>  (S_NOT_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOT_B32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 0,
        GIR_EraseRootFromParent_Done,
      // Label 417: @35787
      GIM_Try, /*On fail goto*//*Label 418*/ GIMT_Encode4(35816), // Rule ID 7275 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, -1:{ *:[i32] })<<P:Predicate_anonymous_24016>>  =>  (V_NOT_B32_e32:{ *:[i32] } ?:{ *:[i32] }:$src0)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_NOT_B32_e32),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_RootToRootCopy, /*OpIdx*/1, // src0
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 7275,
        GIR_EraseRootFromParent_Done,
      // Label 418: @35816
      GIM_Try, /*On fail goto*//*Label 419*/ GIMT_Encode4(35884), // Rule ID 2341 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:16:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:16:y
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/2, /*StoreIdx*/2, // Name : pred:16:z
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24940),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:16:x, i32:{ *:[i32] }:$src1:$pred:16:y)<<P:Predicate_anonymous_24895>>, i32:{ *:[i32] }:$src2:$pred:16:z)<<P:16:Predicate_anonymous_24940>>  =>  (V_XOR3_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR3_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/2, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 2341,
        GIR_EraseRootFromParent_Done,
      // Label 419: @35884
      GIM_Try, /*On fail goto*//*Label 420*/ GIMT_Encode4(35952), // Rule ID 8242 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_isGFX10Plus),
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordNamedOperand, /*MI*/0, /*Op*/1, /*StoreIdx*/2, // Name : pred:16:z
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/1, /*StoreIdx*/0, // Name : pred:16:x
        GIM_RecordNamedOperand, /*MI*/1, /*Op*/2, /*StoreIdx*/1, // Name : pred:16:y
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24940),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src2:$pred:16:z, (xor:{ *:[i32] } i32:{ *:[i32] }:$src0:$pred:16:x, i32:{ *:[i32] }:$src1:$pred:16:y)<<P:Predicate_anonymous_24895>>)<<P:16:Predicate_anonymous_24940>>  =>  (V_XOR3_B32_e64:{ *:[i32] } VSrc_b32:{ *:[i32] }:$src0, VSrc_b32:{ *:[i32] }:$src1, VSrc_b32:{ *:[i32] }:$src2)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR3_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootToRootCopy, /*OpIdx*/1, // src2
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8242,
        GIR_EraseRootFromParent_Done,
      // Label 420: @35952
      GIM_Try, /*On fail goto*//*Label 421*/ GIMT_Encode4(35981), // Rule ID 58 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_32RegClassID),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18648),
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)<<P:Predicate_anonymous_18648>>  =>  (S_XOR_B32:{ *:[i32] }:{ *:[i1] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::S_XOR_B32),
        GIR_AddImplicitDef, /*InsnID*/0, GIMT_Encode2(AMDGPU::SCC), GIMT_Encode2(RegState::Dead),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 58,
        GIR_Done,
      // Label 421: @35981
      GIM_Try, /*On fail goto*//*Label 422*/ GIMT_Encode4(36031), // Rule ID 8174 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, -1:{ *:[i32] }), i32:{ *:[i32] }:$src1)  =>  (V_XNOR_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/2, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8174,
        GIR_EraseRootFromParent_Done,
      // Label 422: @36031
      GIM_Try, /*On fail goto*//*Label 423*/ GIMT_Encode4(36083), // Rule ID 917 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1), -1:{ *:[i32] })  =>  (V_XNOR_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 917,
        GIR_EraseRootFromParent_Done,
      // Label 423: @36083
      GIM_Try, /*On fail goto*//*Label 424*/ GIMT_Encode4(36133), // Rule ID 8175 //
        GIM_CheckFeatures, GIMT_Encode2(GIFBS_HasDLInsts),
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s32,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s32,
        GIM_CheckConstantInt8, /*MI*/1, /*Op*/2, uint8_t(-1),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src1, (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, -1:{ *:[i32] }))  =>  (V_XNOR_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::V_XNOR_B32_e64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[vdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_RootToRootCopy, /*OpIdx*/1, // src1
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 8175,
        GIR_EraseRootFromParent_Done,
      // Label 424: @36133
      GIM_Try, /*On fail goto*//*Label 425*/ GIMT_Encode4(36156), // Rule ID 833 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s32,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // (xor:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)  =>  (V_XOR_B32_e64:{ *:[i32] } i32:{ *:[i32] }:$src0, i32:{ *:[i32] }:$src1)
        GIR_MutateOpcode, /*InsnID*/0, /*RecycleInsnID*/0, /*Opcode*/GIMT_Encode2(AMDGPU::V_XOR_B32_e64),
        GIR_AddImplicitUse, /*InsnID*/0, GIMT_Encode2(AMDGPU::EXEC),
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 833,
        GIR_Done,
      // Label 425: @36156
      GIM_Reject,
    // Label 405: @36157
    GIM_Reject,
    // Label 395: @36158
    GIM_Try, /*On fail goto*//*Label 426*/ GIMT_Encode4(38790),
      GIM_RootCheckType, /*Op*/1, /*Type*/GILLT_s64,
      GIM_Try, /*On fail goto*//*Label 427*/ GIMT_Encode4(36224), // Rule ID 63 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_and_oneuse>>, -1:{ *:[i64] })<<P:Predicate_anonymous_18616>>  =>  (S_NAND_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NAND_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 63,
        GIR_EraseRootFromParent_Done,
      // Label 427: @36224
      GIM_Try, /*On fail goto*//*Label 428*/ GIMT_Encode4(36282), // Rule ID 65 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_OR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i64] } (or:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_or_oneuse>>, -1:{ *:[i64] })<<P:Predicate_anonymous_18616>>  =>  (S_NOR_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_NOR_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 65,
        GIR_EraseRootFromParent_Done,
      // Label 428: @36282
      GIM_Try, /*On fail goto*//*Label 429*/ GIMT_Encode4(36340), // Rule ID 61 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::SReg_64RegClassID),
        GIM_RecordInsn, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_CheckHasOneUse, /*MI*/1,
        GIM_CheckConstantInt8, /*MI*/0, /*Op*/2, uint8_t(-1),
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_18616),
        GIM_CheckIsSafeToFold, /*NumInsns*/1,
        // (xor:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)<<P:Predicate_xor_oneuse>>, -1:{ *:[i64] })<<P:Predicate_anonymous_18616>>  =>  (S_XNOR_B64:{ *:[i64] }:{ *:[i1] } i64:{ *:[i64] }:$src0, i64:{ *:[i64] }:$src1)
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(AMDGPU::S_XNOR_B64),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[sdst]
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/1, // src0
        GIR_Copy, /*NewInsnID*/0, /*OldInsnID*/1, /*OpIdx*/2, // src1
        GIR_SetImplicitDefDead, /*InsnID*/0, /*OpIdx for AMDGPU::SCC*/0,
        GIR_RootConstrainSelectedInstOperands,
        // GIR_Coverage, 61,
        GIR_EraseRootFromParent_Done,
      // Label 429: @36340
      GIM_Try, /*On fail goto*//*Label 430*/ GIMT_Encode4(36639), // Rule ID 10914 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$z)<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10914,
        GIR_EraseRootFromParent_Done,
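        // Rule 10914 and the commuted variants that follow (10915, 10912, 10913,
        // 10911, 10910, 10909, 7184) all match the 64-bit bitfield-insert identity
        //   ((y ^ z) & x) ^ z  ==  (x & y) | (~x & z)  ==  V_BFI_B32(x, y, z)
        // and lower it by splitting each 64-bit operand into its sub0/sub1 halves
        // (the /*SubRegIndex*/3 and /*SubRegIndex*/11 immediates appear to encode
        // sub0 and sub1 here), emitting one V_BFI_B32_e64 per half, and rejoining
        // the two 32-bit results with a REG_SEQUENCE.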
      // Label 430: @36639
      GIM_Try, /*On fail goto*//*Label 431*/ GIMT_Encode4(36938), // Rule ID 10915 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$y), i64:{ *:[i64] }:$x), i64:{ *:[i64] }:$z)<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10915,
        GIR_EraseRootFromParent_Done,
      // Label 431: @36938
      GIM_Try, /*On fail goto*//*Label 432*/ GIMT_Encode4(37237), // Rule ID 10912 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/2,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, (xor:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$z)), i64:{ *:[i64] }:$z)<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10912,
        GIR_EraseRootFromParent_Done,
      // Label 432: @37237
      GIM_Try, /*On fail goto*//*Label 433*/ GIMT_Encode4(37536), // Rule ID 10913 //
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/1, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[0] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/0, /*OpIdx*/2, /*OtherMI*/2, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } (and:{ *:[i64] } i64:{ *:[i64] }:$x, (xor:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$y)), i64:{ *:[i64] }:$z)<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10913,
        GIR_EraseRootFromParent_Done,
      // Label 433: @37536
      GIM_Try, /*On fail goto*//*Label 434*/ GIMT_Encode4(37834), // Rule ID 10911 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/1, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } i64:{ *:[i64] }:$z, (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$y), i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10911,
        GIR_EraseRootFromParent_Done,
      // Label 434: @37834
      GIM_Try, /*On fail goto*//*Label 435*/ GIMT_Encode4(38132), // Rule ID 10910 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/1, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/2, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } i64:{ *:[i64] }:$z, (and:{ *:[i64] } (xor:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$z), i64:{ *:[i64] }:$x))<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10910,
        GIR_EraseRootFromParent_Done,
      // Label 435: @38132
      GIM_Try, /*On fail goto*//*Label 436*/ GIMT_Encode4(38430), // Rule ID 10909 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/2, /*Type*/GILLT_s64,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/1, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } i64:{ *:[i64] }:$z, (and:{ *:[i64] } i64:{ *:[i64] }:$x, (xor:{ *:[i64] } i64:{ *:[i64] }:$z, i64:{ *:[i64] }:$y)))<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/2, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/2, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/2, /*TempRegID*/1, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/2, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // x
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/2, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/1, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/1, /*TempRegID*/0, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/1,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/2,
        GIR_AddSimpleTempRegister, /*InsnID*/1, /*TempRegID*/3,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/1,
        GIR_BuildRootMI, /*Opcode*/GIMT_Encode2(TargetOpcode::REG_SEQUENCE),
        GIR_RootToRootCopy, /*OpIdx*/0, // DstI[dst]
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/0,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/3,
        GIR_AddSimpleTempRegister, /*InsnID*/0, /*TempRegID*/4,
        GIR_AddImm8, /*InsnID*/0, /*SubRegIndex*/11,
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/0, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/1, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/0, /*Op*/3, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        // GIR_Coverage, 10909,
        GIR_EraseRootFromParent_Done,
      // Label 436: @38430
      GIM_Try, /*On fail goto*//*Label 437*/ GIMT_Encode4(38728), // Rule ID 7184 //
        GIM_RootCheckType, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RootCheckRegBankForClass, /*Op*/0, /*RC*/GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/1, /*MI*/0, /*OpIdx*/2, // MIs[1]
        GIM_CheckOpcode, /*MI*/1, GIMT_Encode2(TargetOpcode::G_AND),
        GIM_CheckType, /*MI*/1, /*Op*/1, /*Type*/GILLT_s64,
        GIM_CheckType, /*MI*/1, /*Op*/2, /*Type*/GILLT_s64,
        GIM_RecordInsnIgnoreCopies, /*DefineMI*/2, /*MI*/1, /*OpIdx*/2, // MIs[2]
        GIM_CheckOpcode, /*MI*/2, GIMT_Encode2(TargetOpcode::G_XOR),
        GIM_CheckType, /*MI*/2, /*Op*/1, /*Type*/GILLT_s64,
        // MIs[2] z
        GIM_CheckIsSameOperandIgnoreCopies, /*MI*/2, /*OpIdx*/2, /*OtherMI*/0, /*OtherOpIdx*/1,
        GIM_CheckCxxInsnPredicate, /*MI*/0, /*FnId*/GIMT_Encode2(GICXXPred_MI_Predicate_anonymous_24016),
        GIM_CheckIsSafeToFold, /*NumInsns*/2,
        // (xor:{ *:[i64] } i64:{ *:[i64] }:$z, (and:{ *:[i64] } i64:{ *:[i64] }:$x, (xor:{ *:[i64] } i64:{ *:[i64] }:$y, i64:{ *:[i64] }:$z)))<<P:Predicate_anonymous_24016>>  =>  (REG_SEQUENCE:{ *:[i64] } VReg_64:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub0:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub0:{ *:[i32] })), sub0:{ *:[i32] }, (V_BFI_B32_e64:{ *:[i16] } (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$x, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$y, sub1:{ *:[i32] }), (EXTRACT_SUBREG:{ *:[i32] } VReg_64:{ *:[i64] }:$z, sub1:{ *:[i32] })), sub1:{ *:[i32] })
        GIR_MakeTempReg, /*TempRegID*/0, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/1, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/2, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/3, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/4, /*TypeID*/GILLT_s16,
        GIR_MakeTempReg, /*TempRegID*/5, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/6, /*TypeID*/GILLT_s32,
        GIR_MakeTempReg, /*TempRegID*/7, /*TypeID*/GILLT_s32,
        GIR_BuildMI, /*InsnID*/8, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/8, /*TempRegID*/7, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/8, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // z
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/8, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/7, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/7, /*TempRegID*/6, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/7, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // y
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/7, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/6, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/6, /*TempRegID*/5, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/6, /*OldInsnID*/1, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(11), // x
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/6, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/5, /*Opcode*/GIMT_Encode2(AMDGPU::V_BFI_B32_e64),
        GIR_AddTempRegister, /*InsnID*/5, /*TempRegID*/4, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/5,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/6,
        GIR_AddSimpleTempRegister, /*InsnID*/5, /*TempRegID*/7,
        GIR_ConstrainSelectedInstOperands, /*InsnID*/5,
        GIR_BuildMI, /*InsnID*/4, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/4, /*TempRegID*/3, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/4, /*OldInsnID*/0, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // z
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
        GIR_ConstrainOperandRC, /*InsnID*/4, /*Op*/1, GIMT_Encode2(AMDGPU::VReg_64RegClassID),
        GIR_BuildMI, /*InsnID*/3, /*Opcode*/GIMT_Encode2(TargetOpcode::COPY),
        GIR_AddTempRegister, /*InsnID*/3, /*TempRegID*/2, /*TempRegFlags*/GIMT_Encode2(RegState::Define),
        GIR_CopySubReg, /*NewInsnID*/3, /*OldInsnID*/2, /*OpIdx*/1, /*SubRegIdx*/GIMT_Encode2(3), // y
        GIR_ConstrainOperandRC, /*InsnID*/3, /*Op*/0, GIMT_Encode2(AMDGPU::VGPR_32RegClassID),
<TRUNCATED>
#undef GIMT_Encode2
#undef GIMT_Encode4
#undef GIMT_Encode8
#endif // ifdef GET_GLOBALISEL_IMPL

#ifdef GET_GLOBALISEL_PREDICATES_DECL
#endif // ifdef GET_GLOBALISEL_PREDICATES_DECL

#ifdef GET_GLOBALISEL_PREDICATES_INIT
#endif // ifdef GET_GLOBALISEL_PREDICATES_INIT