diff --git a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
index 624a82716185a..8d2d23db230be 100644
--- a/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
+++ b/llvm/lib/Target/AArch64/AArch64ISelLowering.cpp
@@ -22653,7 +22653,12 @@ static SDValue performPostLD1Combine(SDNode *N,
 static bool performTBISimplification(SDValue Addr,
                                      TargetLowering::DAGCombinerInfo &DCI,
                                      SelectionDAG &DAG) {
-  APInt DemandedMask = APInt::getLowBitsSet(64, 56);
+  const auto &Subtarget = DAG.getSubtarget<AArch64Subtarget>();
+  // If MTE is enabled, TBI only applies to the top 4 bits.
+  // Normal arm64 processes on Darwin may be run with MTE enabled.
+  unsigned NumIgnoreBits =
+      Subtarget.hasMTE() || Subtarget.isTargetDarwin() ? 4 : 8;
+  APInt DemandedMask = APInt::getLowBitsSet(64, 64 - NumIgnoreBits);
   KnownBits Known;
   TargetLowering::TargetLoweringOpt TLO(DAG, !DCI.isBeforeLegalize(),
                                         !DCI.isBeforeLegalizeOps());
diff --git a/llvm/test/CodeGen/AArch64/tbi.ll b/llvm/test/CodeGen/AArch64/tbi.ll
index 285726a485b87..a7c9b4cddf808 100644
--- a/llvm/test/CodeGen/AArch64/tbi.ll
+++ b/llvm/test/CodeGen/AArch64/tbi.ll
@@ -7,7 +7,7 @@
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define i32 @ld_and32(i64 %p) {
-  %and = and i64 %p, 72057594037927935
+  %and = and i64 %p, 1152921504606846975
   %cast = inttoptr i64 %and to ptr
   %load = load i32, ptr %cast
   ret i32 %load
@@ -18,7 +18,7 @@ define i32 @ld_and32(i64 %p) {
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define i32 @ld_and_plus_offset(i64 %p) {
-  %and = and i64 %p, 72057594037927935
+  %and = and i64 %p, 1152921504606846975
   %cast = inttoptr i64 %and to ptr
   %gep = getelementptr i32, ptr %cast, i64 4
   %load = load i32, ptr %gep
@@ -40,7 +40,7 @@ define i32 @ld_and32_wider(i64 %p) {
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define i64 @ld_and64(i64 %p) {
-  %and = and i64 %p, 72057594037927935
+  %and = and i64 %p, 1152921504606846975
   %cast = inttoptr i64 %and to ptr
   %load = load i64, ptr %cast
   ret i64 %load
@@ -50,7 +50,7 @@ define i64 @ld_and64(i64 %p) {
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define void @st_and32(i64 %p, i32 %v) {
-  %and = and i64 %p, 72057594037927935
+  %and = and i64 %p, 1152921504606846975
   %cast = inttoptr i64 %and to ptr
   store i32 %v, ptr %cast
   ret void
@@ -62,7 +62,7 @@ define void @st_and32(i64 %p, i32 %v) {
 ; NO_TBI: and x
 define i32 @ld_ro(i64 %a, i64 %b) {
   %p = add i64 %a, %b
-  %and = and i64 %p, 72057594037927935
+  %and = and i64 %p, 1152921504606846975
   %cast = inttoptr i64 %and to ptr
   %load = load i32, ptr %cast
   ret i32 %load
@@ -73,7 +73,7 @@ define i32 @ld_ro(i64 %a, i64 %b) {
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define i32 @ld_ro2(i64 %a, i64 %b) {
-  %and = and i64 %a, 72057594037927935
+  %and = and i64 %a, 1152921504606846975
   %p = add i64 %and, %b
   %cast = inttoptr i64 %p to ptr
   %load = load i32, ptr %cast
@@ -85,7 +85,7 @@ define i32 @ld_ro2(i64 %a, i64 %b) {
 ; TBI-NOT: and x
 ; NO_TBI: and x
 define i32 @ld_indirect_and(i64 %r1, i64 %r2) {
-  %and = and i64 %r1, 72057594037927935
+  %and = and i64 %r1, 1152921504606846975
   %p = or i64 %and, %r2
   %cast = inttoptr i64 %p to ptr
   %load = load i32, ptr %cast