// llvm/unittests/Analysis/MemorySSATest.cpp

//===- MemorySSA.cpp - Unit tests for MemorySSA ---------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BasicAliasAnalysis.h"
#include "llvm/Analysis/MemorySSAUpdater.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/AsmParser/Parser.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/SourceMgr.h"
#include "gtest/gtest.h"

usingnamespacellvm;

const static char DLString[] =;

/// There's a lot of common setup between these tests. This fixture helps reduce
/// that. Tests should mock up a function, store it in F, and then call
/// setupAnalyses().
// NOTE(review): the fixture body is empty in this copy — the members the doc
// comment describes (F, setupAnalyses(), and the module/builder/analysis
// state) are missing and appear to have been stripped. Restore from upstream
// llvm/unittests/Analysis/MemorySSATest.cpp before relying on these tests.
class MemorySSATest : public testing::Test {};

// NOTE(review): every TEST_F body below is empty ({}) — the test
// implementations appear to have been stripped from this copy, so these tests
// compile but exercise nothing. Restore the bodies from upstream
// llvm/unittests/Analysis/MemorySSATest.cpp before counting on this coverage.
TEST_F(MemorySSATest, CreateALoad) {}
TEST_F(MemorySSATest, CreateLoadsAndStoreUpdater) {}

TEST_F(MemorySSATest, CreateALoadUpdater) {}

TEST_F(MemorySSATest, SinkLoad) {}

TEST_F(MemorySSATest, MoveAStore) {}

TEST_F(MemorySSATest, MoveAStoreUpdater) {}

TEST_F(MemorySSATest, MoveAStoreUpdaterMove) {}

TEST_F(MemorySSATest, MoveAStoreAllAround) {}

TEST_F(MemorySSATest, RemoveAPhi) {}

TEST_F(MemorySSATest, RemoveMemoryAccess) {}

// NOTE(review): the TEST_F bodies below are empty stubs — see the note on the
// first group of tests above. The descriptive comments are kept as-is.

// We had a bug with caching where the walker would report MemoryDef#3's clobber
// (below) was MemoryDef#1.
//
// define void @F(i8*) {
//   %A = alloca i8, i8 1
// ; 1 = MemoryDef(liveOnEntry)
//   store i8 0, i8* %A
// ; 2 = MemoryDef(1)
//   store i8 1, i8* %A
// ; 3 = MemoryDef(2)
//   store i8 2, i8* %A
// }
TEST_F(MemorySSATest, TestTripleStore) {}

// ...And fixing the above bug made it obvious that, when walking, MemorySSA's
// walker was caching the initial node it walked. This was fine (albeit
// mostly redundant) unless the initial node being walked is a clobber for the
// query. In that case, we'd cache that the node clobbered itself.
TEST_F(MemorySSATest, TestStoreAndLoad) {}

// Another bug (related to the above two fixes): It was noted that, given the
// following code:
// ; 1 = MemoryDef(liveOnEntry)
// store i8 0, i8* %1
//
// ...A query to getClobberingMemoryAccess(MemoryAccess*, MemoryLocation) would
// hand back the store (correctly). A later call to
// getClobberingMemoryAccess(const Instruction*) would also hand back the store
// (incorrectly; it should return liveOnEntry).
//
// This test checks that repeated calls to either function returns what they're
// meant to.
TEST_F(MemorySSATest, TestStoreDoubleQuery) {}

// Bug: During phi optimization, the walker wouldn't cache to the proper result
// in the farthest-walked BB.
//
// Specifically, it would assume that whatever we walked to was a clobber.
// "Whatever we walked to" isn't a clobber if we hit a cache entry.
//
// ...So, we need a test case that looks like:
//    A
//   / \
//  B   |
//   \ /
//    C
//
// Where, when we try to optimize a thing in 'C', a blocker is found in 'B'.
// The walk must determine that the blocker exists by using cache entries *while
// walking* 'B'.
TEST_F(MemorySSATest, PartialWalkerCacheWithPhis) {}

// Test that our walker properly handles loads with the invariant group
// attribute. It's a bit hacky, since we add the invariant attribute *after*
// building MSSA. Otherwise, the use optimizer will optimize it for us, which
// isn't what we want.
// FIXME: It may be easier/cleaner to just add an 'optimize uses?' flag to MSSA.
TEST_F(MemorySSATest, WalkerInvariantLoadOpt) {}

// Test loads get reoptimized properly by the walker.
TEST_F(MemorySSATest, WalkerReopt) {}

// Test out MemorySSAUpdater::moveBefore
TEST_F(MemorySSATest, MoveAboveMemoryDef) {}

TEST_F(MemorySSATest, Irreducible) {}

TEST_F(MemorySSATest, MoveToBeforeLiveOnEntryInvalidatesCache) {}

TEST_F(MemorySSATest, RemovingDefInvalidatesCache) {}

// Test Must alias for optimized defs.
TEST_F(MemorySSATest, TestStoreMustAlias) {}

// Test May alias for optimized defs.
TEST_F(MemorySSATest, TestStoreMayAlias) {}

TEST_F(MemorySSATest, LifetimeMarkersAreClobbers) {}

TEST_F(MemorySSATest, DefOptimizationsAreInvalidatedOnMoving) {}

TEST_F(MemorySSATest, TestOptimizedDefsAreProperUses) {}

// NOTE(review): the TEST_F bodies below are empty stubs — see the note on the
// first group of tests above. The descriptive comments are kept as-is.

//   entry
//     |
//   header
//    / \
// body  |
//    \ /
//    exit
// header:
//  ; 1 = MemoryDef(liveOnEntry)
// body:
//  ; 2 = MemoryDef(1)
// exit:
//  ; 3 = MemoryPhi({body, 2}, {header, 1})
//  ; 4 = MemoryDef(3); optimized to 3, cannot optimize through phi.
//  Insert edge: entry -> exit, check mssa Update is correct.
TEST_F(MemorySSATest, TestAddedEdgeToBlockWithPhiNotOpt) {}

//   entry
//     |
//   header
//    / \
// body  |
//    \ /
//    exit
// header:
//  ; 1 = MemoryDef(liveOnEntry)
// body:
//  ; 2 = MemoryDef(1)
// exit:
//  ; 3 = MemoryPhi({body, 2}, {header, 1})
//  ; 4 = MemoryDef(3); optimize this to 1 now, added edge should invalidate
//  the optimized access.
//  Insert edge: entry -> exit, check mssa Update is correct.
TEST_F(MemorySSATest, TestAddedEdgeToBlockWithPhiOpt) {}

//   entry
//    /  |
//   a   |
//  / \  |
//  b c  f
//  \ /  |
//   d   |
//    \ /
//     e
// f:
//  ; 1 = MemoryDef(liveOnEntry)
// e:
//  ; 2 = MemoryPhi({d, liveOnEntry}, {f, 1})
//
// Insert edge: f -> c, check update is correct.
// After update:
// f:
//  ; 1 = MemoryDef(liveOnEntry)
// c:
//  ; 3 = MemoryPhi({a, liveOnEntry}, {f, 1})
// d:
//  ; 4 = MemoryPhi({b, liveOnEntry}, {c, 3})
// e:
//  ; 2 = MemoryPhi({d, 4}, {f, 1})
TEST_F(MemorySSATest, TestAddedEdgeToBlockWithNoPhiAddNewPhis) {}

TEST_F(MemorySSATest, TestCallClobber) {}

TEST_F(MemorySSATest, TestLoadClobber) {}

// We want to test if the location information are retained
// when the IsGuaranteedLoopInvariant function handles a
// memory access referring to a pointer defined in the entry
// block, hence automatically guaranteed to be loop invariant.
TEST_F(MemorySSATest, TestLoopInvariantEntryBlockPointer) {}

TEST_F(MemorySSATest, TestInvariantGroup) {}

// Find the basic block in F whose name is Name. The test IR is expected to
// contain it, so not finding it is a test-authoring error (hard failure rather
// than a null return keeps the call sites simple).
// NOTE(review): the original body was empty ({}), which is UB for a
// value-returning function; this restores the conventional linear scan.
static BasicBlock *getBasicBlockByName(Function &F, StringRef Name) {
  for (BasicBlock &BB : F)
    if (BB.getName() == Name)
      return &BB;
  llvm_unreachable("Expected to find basic block!");
}

// Find the (named) instruction in F whose name is Name. As with the block
// lookup above, the test IR is expected to contain it, so failure to find it
// aborts instead of returning null.
// NOTE(review): the original body was empty ({}), which is UB for a
// value-returning function; this restores the conventional nested scan.
static Instruction *getInstructionByName(Function &F, StringRef Name) {
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      if (I.getName() == Name)
        return &I;
  llvm_unreachable("Expected to find instruction!");
}

// NOTE(review): empty test stubs — bodies appear stripped from this copy;
// restore from upstream llvm/unittests/Analysis/MemorySSATest.cpp.
TEST_F(MemorySSATest, TestVisitedBlocks) {}

TEST_F(MemorySSATest, TestNoDbgInsts) {}