aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorRoman Lebedev <lebedev.ri@gmail.com>2021-04-11 17:20:59 +0300
committerRoman Lebedev <lebedev.ri@gmail.com>2021-04-11 18:08:08 +0300
commit07c13b3b5f67b9518323864723a8797cc6d60a43 (patch)
tree8db2fd1fb6908901fb10227594efd5924cc3bec8
parent[InstCombine] (X | Op01C) + Op1C --> X + (Op01C + Op1C) iff the or is actually an add (diff)
downloadllvm-project-07c13b3b5f67b9518323864723a8797cc6d60a43.tar.gz
llvm-project-07c13b3b5f67b9518323864723a8797cc6d60a43.tar.bz2
llvm-project-07c13b3b5f67b9518323864723a8797cc6d60a43.zip
[NFC][InstCombine] Add tests for "get low bit mask upto and including bit X" pattern
-rw-r--r--llvm/test/Transforms/InstCombine/get-lowbitmask-upto-and-including-bit.ll284
1 file changed, 284 insertions, 0 deletions
diff --git a/llvm/test/Transforms/InstCombine/get-lowbitmask-upto-and-including-bit.ll b/llvm/test/Transforms/InstCombine/get-lowbitmask-upto-and-including-bit.ll
new file mode 100644
index 000000000000..88ba54a930d0
--- /dev/null
+++ b/llvm/test/Transforms/InstCombine/get-lowbitmask-upto-and-including-bit.ll
@@ -0,0 +1,284 @@
+; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
+; RUN: opt < %s -instcombine -S | FileCheck %s
+
+declare void @use8(i8)
+
+; Basic test
+define i8 @t0(i8 %x) {
+; CHECK-LABEL: @t0(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask, -1
+ %mask = or i8 %lowbitmask, %bitmask
+ ret i8 %mask
+}
+
+; Same, but different bit width
+define i16 @t1(i16 %x) {
+; CHECK-LABEL: @t1(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl i16 1, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i16 [[BITMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i16 [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret i16 [[MASK]]
+;
+ %bitmask = shl i16 1, %x
+ %lowbitmask = add i16 %bitmask, -1
+ %mask = or i16 %lowbitmask, %bitmask
+ ret i16 %mask
+}
+
+; Vectors
+define <2 x i8> @t2_vec(<2 x i8> %x) {
+; CHECK-LABEL: @t2_vec(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl <2 x i8> <i8 1, i8 1>, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add <2 x i8> [[BITMASK]], <i8 -1, i8 -1>
+; CHECK-NEXT: [[MASK:%.*]] = or <2 x i8> [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret <2 x i8> [[MASK]]
+;
+ %bitmask = shl <2 x i8> <i8 1, i8 1>, %x
+ %lowbitmask = add <2 x i8> %bitmask, <i8 -1, i8 -1>
+ %mask = or <2 x i8> %lowbitmask, %bitmask
+ ret <2 x i8> %mask
+}
+define <3 x i8> @t3_vec_undef0(<3 x i8> %x) {
+; CHECK-LABEL: @t3_vec_undef0(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl <3 x i8> <i8 1, i8 undef, i8 1>, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add <3 x i8> [[BITMASK]], <i8 -1, i8 -1, i8 -1>
+; CHECK-NEXT: [[MASK:%.*]] = or <3 x i8> [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret <3 x i8> [[MASK]]
+;
+ %bitmask = shl <3 x i8> <i8 1, i8 undef, i8 1>, %x
+ %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 -1, i8 -1>
+ %mask = or <3 x i8> %lowbitmask, %bitmask
+ ret <3 x i8> %mask
+}
+define <3 x i8> @t4_vec_undef1(<3 x i8> %x) {
+; CHECK-LABEL: @t4_vec_undef1(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl <3 x i8> <i8 1, i8 1, i8 1>, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add <3 x i8> [[BITMASK]], <i8 -1, i8 undef, i8 -1>
+; CHECK-NEXT: [[MASK:%.*]] = or <3 x i8> [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret <3 x i8> [[MASK]]
+;
+ %bitmask = shl <3 x i8> <i8 1, i8 1, i8 1>, %x
+ %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 undef, i8 -1>
+ %mask = or <3 x i8> %lowbitmask, %bitmask
+ ret <3 x i8> %mask
+}
+define <3 x i8> @t5_vec_undef2(<3 x i8> %x) {
+; CHECK-LABEL: @t5_vec_undef2(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl <3 x i8> <i8 1, i8 1, i8 undef>, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add <3 x i8> [[BITMASK]], <i8 -1, i8 undef, i8 -1>
+; CHECK-NEXT: [[MASK:%.*]] = or <3 x i8> [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret <3 x i8> [[MASK]]
+;
+ %bitmask = shl <3 x i8> <i8 1, i8 1, i8 undef>, %x
+ %lowbitmask = add <3 x i8> %bitmask, <i8 -1, i8 undef, i8 -1>
+ %mask = or <3 x i8> %lowbitmask, %bitmask
+ ret <3 x i8> %mask
+}
+
+; One-use tests
+define i8 @t6_extrause0(i8 %x) {
+; CHECK-LABEL: @t6_extrause0(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK]])
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask = shl i8 1, %x
+ call void @use8(i8 %bitmask)
+ %lowbitmask = add i8 %bitmask, -1
+ %mask = or i8 %lowbitmask, %bitmask
+ ret i8 %mask
+}
+define i8 @t7_extrause1(i8 %x) {
+; CHECK-LABEL: @t7_extrause1(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask
+ ret i8 %mask
+}
+define i8 @t8_extrause2(i8 %x) {
+; CHECK-LABEL: @t8_extrause2(
+; CHECK-NEXT: [[BITMASK:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK]])
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask = shl i8 1, %x
+ call void @use8(i8 %bitmask)
+ %lowbitmask = add i8 %bitmask, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask
+ ret i8 %mask
+}
+
+; Non-CSE'd test
+define i8 @t9_nocse(i8 %x) {
+; CHECK-LABEL: @t9_nocse(
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ %bitmask1 = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask0, -1
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+
+; Non-CSE'd extra uses test
+define i8 @t10_nocse_extrause0(i8 %x) {
+; CHECK-LABEL: @t10_nocse_extrause0(
+; CHECK-NEXT: [[BITMASK0:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK0]])
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK1]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ call void @use8(i8 %bitmask0)
+ %bitmask1 = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask0, -1
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t11_nocse_extrause1(i8 %x) {
+; CHECK-LABEL: @t11_nocse_extrause1(
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK1]])
+; CHECK-NEXT: [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ %bitmask1 = shl i8 1, %x
+ call void @use8(i8 %bitmask1)
+ %lowbitmask = add i8 %bitmask0, -1
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t12_nocse_extrause2(i8 %x) {
+; CHECK-LABEL: @t12_nocse_extrause2(
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ %bitmask1 = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask0, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t13_nocse_extrause3(i8 %x) {
+; CHECK-LABEL: @t13_nocse_extrause3(
+; CHECK-NEXT: [[BITMASK0:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK0]])
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK1]])
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK1]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ call void @use8(i8 %bitmask0)
+ %bitmask1 = shl i8 1, %x
+ call void @use8(i8 %bitmask1)
+ %lowbitmask = add i8 %bitmask0, -1
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t14_nocse_extrause4(i8 %x) {
+; CHECK-LABEL: @t14_nocse_extrause4(
+; CHECK-NEXT: [[BITMASK0:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK0]])
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK1]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ call void @use8(i8 %bitmask0)
+ %bitmask1 = shl i8 1, %x
+ %lowbitmask = add i8 %bitmask0, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t15_nocse_extrause5(i8 %x) {
+; CHECK-LABEL: @t15_nocse_extrause5(
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK1]])
+; CHECK-NEXT: [[NOTMASK:%.*]] = shl nsw i8 -1, [[X]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ %bitmask1 = shl i8 1, %x
+ call void @use8(i8 %bitmask1)
+ %lowbitmask = add i8 %bitmask0, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+define i8 @t16_nocse_extrause6(i8 %x) {
+; CHECK-LABEL: @t16_nocse_extrause6(
+; CHECK-NEXT: [[BITMASK0:%.*]] = shl i8 1, [[X:%.*]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK0]])
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X]]
+; CHECK-NEXT: call void @use8(i8 [[BITMASK1]])
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = add i8 [[BITMASK0]], -1
+; CHECK-NEXT: call void @use8(i8 [[LOWBITMASK]])
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[LOWBITMASK]], [[BITMASK1]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x
+ call void @use8(i8 %bitmask0)
+ %bitmask1 = shl i8 1, %x
+ call void @use8(i8 %bitmask1)
+ %lowbitmask = add i8 %bitmask0, -1
+ call void @use8(i8 %lowbitmask)
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}
+
+; Non-CSE'd test with mismatching X's.
+define i8 @t17_nocse_mismatching_x(i8 %x0, i8 %x1) {
+; CHECK-LABEL: @t17_nocse_mismatching_x(
+; CHECK-NEXT: [[BITMASK1:%.*]] = shl i8 1, [[X1:%.*]]
+; CHECK-NEXT: [[NOTMASK:%.*]] = shl nsw i8 -1, [[X0:%.*]]
+; CHECK-NEXT: [[LOWBITMASK:%.*]] = xor i8 [[NOTMASK]], -1
+; CHECK-NEXT: [[MASK:%.*]] = or i8 [[BITMASK1]], [[LOWBITMASK]]
+; CHECK-NEXT: ret i8 [[MASK]]
+;
+ %bitmask0 = shl i8 1, %x0
+ %bitmask1 = shl i8 1, %x1
+ %lowbitmask = add i8 %bitmask0, -1
+ %mask = or i8 %lowbitmask, %bitmask1
+ ret i8 %mask
+}