// llvm/unittests/CodeGen/GlobalISel/KnownBitsTest.cpp

//===- KnownBitsTest.cpp -------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "GISelMITest.h"
#include "llvm/CodeGen/GlobalISel/GISelKnownBits.h"
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"

// Sanity checks: known bits of constants should be fully known.
// NOTE(review): every test body in this view is empty ({}) — confirm the
// implementations were not stripped during extraction.
TEST_F(AArch64GISelMITest, TestKnownBitsCst) {}

TEST_F(AArch64GISelMITest, TestKnownBitsCstWithClass) {}

// Check that we are able to track bits through PHIs
// and get the intersections of everything we know on each operand.
TEST_F(AArch64GISelMITest, TestKnownBitsCstPHI) {}

// Check that we report we know nothing when we hit a
// non-generic register.
// Note: this could be improved though!
TEST_F(AArch64GISelMITest, TestKnownBitsCstPHIToNonGenericReg) {}

// Check that we know nothing when at least one value of a PHI
// comes from something we cannot analyze.
// This test is not particularly interesting, it is just
// here to cover the code that stops the analysis of PHIs
// earlier. In that case, we would not even look at the
// second incoming value.
TEST_F(AArch64GISelMITest, TestKnownBitsUnknownPHI) {}

// Check that we manage to process PHIs that loop on themselves.
// For now, the analysis just stops and assumes it knows nothing;
// eventually we could teach it how to properly track phis that
// loop back.
TEST_F(AArch64GISelMITest, TestKnownBitsCstPHIWithLoop) {}

// Check that we don't try to analyze the progression of PHIs.
// Setting a deep enough max depth would allow us to effectively simulate
// what happens in the loop.
// Thus, with a deep enough depth, we could actually figure out
// that %14's zero known bits are actually at least what we know
// for %10, right shifted by one.
// However, this process is super expensive compile-time wise and
// we don't want to reach that conclusion while playing with max depth.
// For now, the analysis just stops and assumes it knows nothing
// on PHIs, but eventually we could teach it how to properly track
// phis that loop back without relying on the lucky effect of max
// depth.
TEST_F(AArch64GISelMITest, TestKnownBitsDecreasingCstPHIWithLoop) {}

// Known-bits propagation across G_PTRTOINT/G_INTTOPTR conversions
// (per the test name; body is empty in this view — confirm).
TEST_F(AArch64GISelMITest, TestKnownBitsPtrToIntViceVersa) {}

// Known-bits propagation through individual generic opcodes, one test per
// operation (logical ops, shifts, arithmetic, compares, select), per the
// test names. NOTE(review): bodies are empty in this view — confirm the
// implementations were not stripped during extraction.
TEST_F(AArch64GISelMITest, TestKnownBitsAND) {}

TEST_F(AArch64GISelMITest, TestKnownBitsOR) {}

TEST_F(AArch64GISelMITest, TestKnownBitsXOR) {}

TEST_F(AArch64GISelMITest, TestKnownBitsXORConstant) {}

TEST_F(AArch64GISelMITest, TestKnownBitsASHR) {}

TEST_F(AArch64GISelMITest, TestKnownBitsLSHR) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSHL) {}

TEST_F(AArch64GISelMITest, TestKnownBitsADD) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSUB) {}

TEST_F(AArch64GISelMITest, TestKnownBitsMUL) {}

TEST_F(AArch64GISelMITest, TestKnownBitsICMP) {}

TEST_F(AArch64GISelMITest, TestKnownBitsFCMP) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSelect) {}

TEST_F(AArch64GISelMITest, TestKnownBits) {}

// Sign-bit queries (signBitIsZero / computeNumSignBits) for various opcodes,
// per the test names; plus AMDGPU-specific checks that exercise the
// target-hook paths of the analysis. NOTE(review): bodies are empty in this
// view — confirm the implementations were not stripped during extraction.
TEST_F(AArch64GISelMITest, TestSignBitIsZero) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsConstant) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsXOR) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsOR) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsAND) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsSext) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsSextInReg) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsAssertSext) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsTrunc) {}

TEST_F(AArch64GISelMITest, TestNumSignBitsCmp) {}

// Same trunc query as above, but on the AMDGPU fixture to exercise the
// target-specific code paths.
TEST_F(AMDGPUGISelMITest, TestNumSignBitsTrunc) {}

TEST_F(AMDGPUGISelMITest, TestTargetKnownAlign) {}

TEST_F(AMDGPUGISelMITest, TestIsKnownToBeAPowerOfTwo) {}

// Helper for the metadata tests below — presumably attaches !range metadata
// to \p Load so the tests have a value range for known-bits to consume.
// NOTE(review): body is empty in this view — confirm it was not stripped.
static void AddRangeMetadata(LLVMContext &Context, MachineInstr *Load) {}

// Known-bits derived from load !range metadata, and propagation through the
// extension / merge / unmerge / byte-reversal opcodes, per the test names.
// NOTE(review): bodies are empty in this view — confirm the implementations
// were not stripped during extraction.
TEST_F(AArch64GISelMITest, TestMetadata) {}

TEST_F(AArch64GISelMITest, TestMetadataExt) {}

TEST_F(AArch64GISelMITest, TestMetadataZExt) {}

TEST_F(AArch64GISelMITest, TestMetadataSExt) {}

TEST_F(AArch64GISelMITest, TestKnownBitsExt) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSextInReg) {}

TEST_F(AArch64GISelMITest, TestKnownBitsAssertSext) {}

TEST_F(AArch64GISelMITest, TestKnownBitsMergeValues) {}

TEST_F(AArch64GISelMITest, TestKnownBitsUnmergeValues) {}

TEST_F(AArch64GISelMITest, TestKnownBitsBSwapBitReverse) {}

// Known-bits for the min/max family, plus query-validity, assert-zext and
// population-count checks, per the test names. NOTE(review): bodies are
// empty in this view — confirm the implementations were not stripped.
TEST_F(AArch64GISelMITest, TestKnownBitsUMAX) {}

// NOTE(review): name differs from the test above only in case (UMAX vs
// UMax); both exist upstream but presumably cover distinct scenarios —
// verify against the full file.
TEST_F(AArch64GISelMITest, TestKnownBitsUMax) {}

TEST_F(AArch64GISelMITest, TestKnownBitsUMIN) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSMAX) {}

TEST_F(AArch64GISelMITest, TestKnownBitsSMIN) {}

TEST_F(AArch64GISelMITest, TestInvalidQueries) {}

TEST_F(AArch64GISelMITest, TestKnownBitsAssertZext) {}

TEST_F(AArch64GISelMITest, TestKnownBitsCTPOP) {}

// AMDGPU-only opcodes (bitfield extract, assert-align) and the AArch64
// overflow-add check, per the test names. NOTE(review): bodies are empty in
// this view — confirm the implementations were not stripped.
TEST_F(AMDGPUGISelMITest, TestKnownBitsUBFX) {}

TEST_F(AMDGPUGISelMITest, TestKnownBitsSBFX) {}

TEST_F(AMDGPUGISelMITest, TestNumSignBitsUBFX) {}

TEST_F(AMDGPUGISelMITest, TestNumSignBitsSBFX) {}

TEST_F(AMDGPUGISelMITest, TestKnownBitsAssertAlign) {}

TEST_F(AArch64GISelMITest, TestKnownBitsUADDO) {}