Diffstat (limited to 'test/Transforms/InstCombine/trunc.ll')
-rw-r--r--  test/Transforms/InstCombine/trunc.ll  99
1 file changed, 99 insertions, 0 deletions
diff --git a/test/Transforms/InstCombine/trunc.ll b/test/Transforms/InstCombine/trunc.ll
new file mode 100644
index 000000000000..f98bfd9236cd
--- /dev/null
+++ b/test/Transforms/InstCombine/trunc.ll
@@ -0,0 +1,99 @@
+; RUN: opt < %s -instcombine -S | FileCheck %s
+target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64"
+
+; Instcombine should be able to eliminate all of these ext casts.
+
+declare void @use(i32)
+
+define i64 @test1(i64 %a) {
+ %b = trunc i64 %a to i32
+ %c = and i32 %b, 15
+ %d = zext i32 %c to i64
+ call void @use(i32 %b)
+ ret i64 %d
+; CHECK: @test1
+; CHECK: %d = and i64 %a, 15
+; CHECK: ret i64 %d
+}
+define i64 @test2(i64 %a) {
+ %b = trunc i64 %a to i32
+ %c = shl i32 %b, 4
+ %q = ashr i32 %c, 4
+ %d = sext i32 %q to i64
+ call void @use(i32 %b)
+ ret i64 %d
+; CHECK: @test2
+; CHECK: shl i64 %a, 36
+; CHECK: %d = ashr i64 {{.*}}, 36
+; CHECK: ret i64 %d
+}
+define i64 @test3(i64 %a) {
+ %b = trunc i64 %a to i32
+ %c = and i32 %b, 8
+ %d = zext i32 %c to i64
+ call void @use(i32 %b)
+ ret i64 %d
+; CHECK: @test3
+; CHECK: %d = and i64 %a, 8
+; CHECK: ret i64 %d
+}
+define i64 @test4(i64 %a) {
+ %b = trunc i64 %a to i32
+ %c = and i32 %b, 8
+ %x = xor i32 %c, 8
+ %d = zext i32 %x to i64
+ call void @use(i32 %b)
+ ret i64 %d
+; CHECK: @test4
+; CHECK: = and i64 %a, 8
+; CHECK: %d = xor i64 {{.*}}, 8
+; CHECK: ret i64 %d
+}
+
+define i32 @test5(i32 %A) {
+ %B = zext i32 %A to i128
+ %C = lshr i128 %B, 16
+ %D = trunc i128 %C to i32
+ ret i32 %D
+; CHECK: @test5
+; CHECK: %C = lshr i32 %A, 16
+; CHECK: ret i32 %C
+}
+
+define i32 @test6(i64 %A) {
+ %B = zext i64 %A to i128
+ %C = lshr i128 %B, 32
+ %D = trunc i128 %C to i32
+ ret i32 %D
+; CHECK: @test6
+; CHECK: %C = lshr i64 %A, 32
+; CHECK: %D = trunc i64 %C to i32
+; CHECK: ret i32 %D
+}
+
+define i92 @test7(i64 %A) {
+ %B = zext i64 %A to i128
+ %C = lshr i128 %B, 32
+ %D = trunc i128 %C to i92
+ ret i92 %D
+; CHECK: @test7
+; CHECK: %B = zext i64 %A to i92
+; CHECK: %C = lshr i92 %B, 32
+; CHECK: ret i92 %C
+}
+
+define i64 @test8(i32 %A, i32 %B) {
+ %tmp38 = zext i32 %A to i128
+ %tmp32 = zext i32 %B to i128
+ %tmp33 = shl i128 %tmp32, 32
+ %ins35 = or i128 %tmp33, %tmp38
+ %tmp42 = trunc i128 %ins35 to i64
+ ret i64 %tmp42
+; CHECK: @test8
+; CHECK: %tmp38 = zext i32 %A to i64
+; CHECK: %tmp32 = zext i32 %B to i64
+; CHECK: %tmp33 = shl i64 %tmp32, 32
+; CHECK: %ins35 = or i64 %tmp33, %tmp38
+; CHECK: ret i64 %ins35
+}
+
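For readers skimming the diff: the CHECK lines only spell out the key folded instructions. As a rough sketch inferred from those CHECK lines (not taken from the commit itself), the IR InstCombine is expected to leave behind for @test1 would look roughly like this, with the mask applied directly at i64 and the zext gone, while the trunc survives only because @use still consumes %b:

define i64 @test1(i64 %a) {
  %b = trunc i64 %a to i32   ; kept alive only by the call to @use below
  %d = and i64 %a, 15        ; mask now applied at i64; the zext is eliminated
  call void @use(i32 %b)
  ret i64 %d
}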