Diffstat (limited to 'llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp')
-rw-r--r--  llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp  |  33
1 file changed, 26 insertions(+), 7 deletions(-)
diff --git a/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp b/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
index 35ba4e2b4032..318c4c06f0f7 100644
--- a/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
+++ b/llvm/lib/Transforms/Scalar/LoopIdiomRecognize.cpp
@@ -1172,8 +1172,15 @@ bool LoopIdiomRecognize::processLoopStridedStore(
 
   CallInst *NewCall;
   if (SplatValue) {
-    NewCall = Builder.CreateMemSet(BasePtr, SplatValue, NumBytes,
-                                   MaybeAlign(StoreAlignment));
+    AAMDNodes AATags = TheStore->getAAMetadata();
+    if (auto CI = dyn_cast<ConstantInt>(NumBytes))
+      AATags = AATags.extendTo(CI->getZExtValue());
+    else
+      AATags = AATags.extendTo(-1);
+
+    NewCall = Builder.CreateMemSet(
+        BasePtr, SplatValue, NumBytes, MaybeAlign(StoreAlignment),
+        /*isVolatile=*/false, AATags.TBAA, AATags.Scope, AATags.NoAlias);
   } else {
     // Everything is emitted in default address space
     Type *Int8PtrTy = DestInt8PtrTy;
@@ -1452,17 +1459,28 @@ bool LoopIdiomRecognize::processLoopStoreOfLoopLoad(
   Value *NumBytes =
       Expander.expandCodeFor(NumBytesS, IntIdxTy, Preheader->getTerminator());
 
+  AAMDNodes AATags = TheLoad->getAAMetadata();
+  AAMDNodes StoreAATags = TheStore->getAAMetadata();
+  AATags = AATags.merge(StoreAATags);
+  if (auto CI = dyn_cast<ConstantInt>(NumBytes))
+    AATags = AATags.extendTo(CI->getZExtValue());
+  else
+    AATags = AATags.extendTo(-1);
+
   CallInst *NewCall = nullptr;
   // Check whether to generate an unordered atomic memcpy:
   // If the load or store are atomic, then they must necessarily be unordered
   // by previous checks.
   if (!TheStore->isAtomic() && !TheLoad->isAtomic()) {
     if (UseMemMove)
-      NewCall = Builder.CreateMemMove(StoreBasePtr, StoreAlign, LoadBasePtr,
-                                      LoadAlign, NumBytes);
+      NewCall = Builder.CreateMemMove(
+          StoreBasePtr, StoreAlign, LoadBasePtr, LoadAlign, NumBytes,
+          /*isVolatile=*/false, AATags.TBAA, AATags.Scope, AATags.NoAlias);
     else
-      NewCall = Builder.CreateMemCpy(StoreBasePtr, StoreAlign, LoadBasePtr,
-                                     LoadAlign, NumBytes);
+      NewCall =
+          Builder.CreateMemCpy(StoreBasePtr, StoreAlign, LoadBasePtr, LoadAlign,
+                               NumBytes, /*isVolatile=*/false, AATags.TBAA,
+                               AATags.TBAAStruct, AATags.Scope, AATags.NoAlias);
   } else {
     // For now don't support unordered atomic memmove.
     if (UseMemMove)
@@ -1486,7 +1504,8 @@ bool LoopIdiomRecognize::processLoopStoreOfLoopLoad(
     // have an alignment but non-atomic loads/stores may not.
     NewCall = Builder.CreateElementUnorderedAtomicMemCpy(
         StoreBasePtr, StoreAlign.getValue(), LoadBasePtr, LoadAlign.getValue(),
-        NumBytes, StoreSize);
+        NumBytes, StoreSize, AATags.TBAA, AATags.TBAAStruct, AATags.Scope,
+        AATags.NoAlias);
   }
   NewCall->setDebugLoc(TheStore->getDebugLoc());
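
Note: the sketch below is not part of the commit. It is a minimal standalone illustration of the AAMDNodes plumbing the patch relies on (Instruction::getAAMetadata, AAMDNodes::merge, AAMDNodes::extendTo, and the metadata-taking CreateMemSet/CreateMemCpy overloads used in the diff above). The demo function, the fabricated TBAA nodes, the constant byte count, and the MaybeAlign(1) alignments are illustrative only, and the exact IRBuilder signatures assume an LLVM tree contemporary with this change.

#include "llvm/IR/Constants.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("aa-propagation-demo", Ctx);
  IRBuilder<> Builder(Ctx);

  // void @demo(i8* %dst, i8* %src) -- a stand-in for the loop body the idiom
  // recognizer would normally rewrite.
  auto *FnTy = FunctionType::get(
      Builder.getVoidTy(), {Builder.getInt8PtrTy(), Builder.getInt8PtrTy()},
      /*isVarArg=*/false);
  Function *F = Function::Create(FnTy, Function::ExternalLinkage, "demo", M);
  Builder.SetInsertPoint(BasicBlock::Create(Ctx, "entry", F));
  Value *Dst = F->getArg(0), *Src = F->getArg(1);

  // Fabricate a store and a load carrying TBAA metadata; these play the role
  // of TheStore/TheLoad in the pass.
  MDBuilder MDB(Ctx);
  MDNode *Root = MDB.createTBAARoot("demo TBAA");
  MDNode *CharTy = MDB.createTBAAScalarTypeNode("omnipotent char", Root);
  MDNode *Tag = MDB.createTBAAStructTagNode(CharTy, CharTy, /*Offset=*/0);
  StoreInst *TheStore = Builder.CreateStore(Builder.getInt8(0), Dst);
  TheStore->setMetadata(LLVMContext::MD_tbaa, Tag);
  LoadInst *TheLoad = Builder.CreateLoad(Builder.getInt8Ty(), Src);
  TheLoad->setMetadata(LLVMContext::MD_tbaa, Tag);

  // memset path: take the store's AA metadata, widen it to the byte count of
  // the runtime call (or to an unknown length when the count isn't constant),
  // and hand the individual nodes to CreateMemSet.
  Value *NumBytes = Builder.getInt64(64);
  AAMDNodes AATags = TheStore->getAAMetadata();
  if (auto *CI = dyn_cast<ConstantInt>(NumBytes))
    AATags = AATags.extendTo(CI->getZExtValue());
  else
    AATags = AATags.extendTo(-1); // Non-constant size: unknown-length extension.
  Builder.CreateMemSet(Dst, Builder.getInt8(0), NumBytes, MaybeAlign(1),
                       /*isVolatile=*/false, AATags.TBAA, AATags.Scope,
                       AATags.NoAlias);

  // memcpy path: merge the load's and store's AA metadata first, then widen
  // and attach it the same way.
  AAMDNodes CpyTags = TheLoad->getAAMetadata().merge(TheStore->getAAMetadata());
  CpyTags = CpyTags.extendTo(cast<ConstantInt>(NumBytes)->getZExtValue());
  Builder.CreateMemCpy(Dst, MaybeAlign(1), Src, MaybeAlign(1), NumBytes,
                       /*isVolatile=*/false, CpyTags.TBAA, CpyTags.TBAAStruct,
                       CpyTags.Scope, CpyTags.NoAlias);

  Builder.CreateRetVoid();
  M.print(outs(), nullptr); // The emitted memset/memcpy calls carry !tbaa.
  return 0;
}

The extendTo(-1) branch mirrors the non-constant byte-count case in the patch: as I read the API, -1 denotes an unknown access length, so the metadata is widened conservatively instead of being dropped outright.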