Diffstat (limited to 'test/Transforms/Inline')
 -rw-r--r--  test/Transforms/Inline/inline-cold.ll                              | 20
 -rw-r--r--  test/Transforms/Inline/inline-constexpr-addrspacecast-argument.ll  |  2
 -rw-r--r--  test/Transforms/Inline/partial-inline-act.ll                       |  2
 -rw-r--r--  test/Transforms/Inline/prof-update.ll                              | 35
 4 files changed, 28 insertions(+), 31 deletions(-)
diff --git a/test/Transforms/Inline/inline-cold.ll b/test/Transforms/Inline/inline-cold.ll
index 93d2569d87ad..e0e679ad4036 100644
--- a/test/Transforms/Inline/inline-cold.ll
+++ b/test/Transforms/Inline/inline-cold.ll
@@ -1,4 +1,4 @@
-; RUN: opt < %s -inline -S -inlinecold-threshold=75 | FileCheck %s
+; RUN: opt < %s -inline -S -inlinecold-threshold=25 | FileCheck %s
; Test that functions with attribute Cold are not inlined while the
; same function without attribute Cold will be inlined.
@@ -64,23 +64,7 @@ entry:
%x3 = add i32 %x2, %a3
%a4 = load volatile i32, i32* @a
%x4 = add i32 %x3, %a4
- %a5 = load volatile i32, i32* @a
- %x5 = add i32 %x4, %a5
- %a6 = load volatile i32, i32* @a
- %x6 = add i32 %x5, %a6
- %a7 = load volatile i32, i32* @a
- %x7 = add i32 %x6, %a6
- %a8 = load volatile i32, i32* @a
- %x8 = add i32 %x7, %a8
- %a9 = load volatile i32, i32* @a
- %x9 = add i32 %x8, %a9
- %a10 = load volatile i32, i32* @a
- %x10 = add i32 %x9, %a10
- %a11 = load volatile i32, i32* @a
- %x11 = add i32 %x10, %a11
- %a12 = load volatile i32, i32* @a
- %x12 = add i32 %x11, %a12
- %add = add i32 %x12, %a
+ %add = add i32 %x4, %a
ret i32 %add
}
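Note: the threshold drop from 75 to 25 is also why most of the volatile-load chain above could go; the cold callee only has to cost more than the new -inlinecold-threshold to stay out of line. A minimal sketch of the pattern this test exercises, with function names, CHECK text, and attribute numbering chosen for illustration rather than copied from the file:

@a = global i32 4

; Marked cold: judged against -inlinecold-threshold=25, so a few volatile
; loads are enough to keep it out of line.
define i32 @ColdFunction(i32 %n) #1 {
entry:
  %v1 = load volatile i32, i32* @a
  %s1 = add i32 %v1, %n
  %v2 = load volatile i32, i32* @a
  %s2 = add i32 %s1, %v2
  %v3 = load volatile i32, i32* @a
  %s3 = add i32 %s2, %v3
  %v4 = load volatile i32, i32* @a
  %s4 = add i32 %s3, %v4
  ret i32 %s4
}

; Same body without the cold attribute: judged against the default inline
; threshold and expected to be inlined.
define i32 @simpleFunction(i32 %n) {
entry:
  %v1 = load volatile i32, i32* @a
  %s1 = add i32 %v1, %n
  %v2 = load volatile i32, i32* @a
  %s2 = add i32 %s1, %v2
  %v3 = load volatile i32, i32* @a
  %s3 = add i32 %s2, %v3
  %v4 = load volatile i32, i32* @a
  %s4 = add i32 %s3, %v4
  ret i32 %s4
}

define i32 @bar(i32 %x) {
entry:
; CHECK-LABEL: @bar(
; CHECK: call i32 @ColdFunction
  %r1 = call i32 @ColdFunction(i32 %x)
; CHECK-NOT: call i32 @simpleFunction
  %r2 = call i32 @simpleFunction(i32 %r1)
  ret i32 %r2
}

attributes #1 = { cold }

The volatile loads exist purely to inflate the inline cost; without them both callees would fall under even the cold threshold and both calls would be inlined.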
diff --git a/test/Transforms/Inline/inline-constexpr-addrspacecast-argument.ll b/test/Transforms/Inline/inline-constexpr-addrspacecast-argument.ll
index 1f2b143c97ee..b8d41abe1c35 100644
--- a/test/Transforms/Inline/inline-constexpr-addrspacecast-argument.ll
+++ b/test/Transforms/Inline/inline-constexpr-addrspacecast-argument.ll
@@ -6,7 +6,7 @@ target datalayout = "e-p3:32:32-p4:64:64-n32"
@lds = internal addrspace(3) global [64 x i64] zeroinitializer
; CHECK-LABEL: @constexpr_addrspacecast_ptr_size_change(
-; CHECK: load i64, i64 addrspace(4)* getelementptr (i64, i64 addrspace(4)* addrspacecast (i64 addrspace(3)* getelementptr inbounds ([64 x i64], [64 x i64] addrspace(3)* @lds, i32 0, i32 0) to i64 addrspace(4)*), i64 undef)
+; CHECK: load i64, i64 addrspace(4)* addrspacecast (i64 addrspace(3)* getelementptr inbounds ([64 x i64], [64 x i64] addrspace(3)* @lds, i32 0, i32 0) to i64 addrspace(4)*)
; CHECK-NEXT: br
define void @constexpr_addrspacecast_ptr_size_change() #0 {
%tmp0 = call i32 @foo(i64 addrspace(4)* addrspacecast (i64 addrspace(3)* getelementptr inbounds ([64 x i64], [64 x i64] addrspace(3)* @lds, i32 0, i32 0) to i64 addrspace(4)*)) #1
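For context, the updated CHECK line reflects constant folding rather than a change to the callee: once the constant addrspacecast expression is substituted for the argument, a getelementptr whose only index is undef folds down to its base pointer (undef may be treated as zero), so the load addresses the addrspacecast directly. A hedged sketch of the callee shape this relies on; the real @foo lives elsewhere in the test file and its exact body and attribute numbering are assumed here:

define i32 @foo(i64 addrspace(4)* %ptr) #0 {
  ; after inlining, %ptr is the constant addrspacecast expression and the
  ; gep with an undef index folds away, leaving a load of the base pointer
  %gep = getelementptr i64, i64 addrspace(4)* %ptr, i64 undef
  %val = load i64, i64 addrspace(4)* %gep
  %trunc = trunc i64 %val to i32
  ret i32 %trunc
}

attributes #0 = { alwaysinline }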
diff --git a/test/Transforms/Inline/partial-inline-act.ll b/test/Transforms/Inline/partial-inline-act.ll
index 916436260bd6..27e719153875 100644
--- a/test/Transforms/Inline/partial-inline-act.ll
+++ b/test/Transforms/Inline/partial-inline-act.ll
@@ -1,4 +1,4 @@
-; RUN: opt < %s -partial-inliner -disable-output
+; RUN: opt < %s -partial-inliner -skip-partial-inlining-cost-analysis -disable-output
; This testcase tests the assumption cache
define internal i32 @inlinedFunc(i1 %cond, i32* align 4 %align.val) {
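The added -skip-partial-inlining-cost-analysis flag keeps the cost model from vetoing the transformation, so the test still exercises what it was written for: handling the assumption cache during partial inlining. A hedged sketch of the shape the partial inliner looks for; the real body of @inlinedFunc sits beyond this hunk and this version is illustrative only:

define internal i32 @inlinedFunc(i1 %cond, i32* align 4 %align.val) {
entry:
  br i1 %cond, label %if.then, label %return

if.then:
  ; the colder, guarded region is outlined; the llvm.assume inside it is
  ; what makes the assumption cache relevant to the transformation
  %ptrint = ptrtoint i32* %align.val to i64
  %maskedptr = and i64 %ptrint, 3
  %maskcond = icmp eq i64 %maskedptr, 0
  call void @llvm.assume(i1 %maskcond)
  store i32 10, i32* %align.val, align 4
  br label %return

return:
  ret i32 0
}

declare void @llvm.assume(i1)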
diff --git a/test/Transforms/Inline/prof-update.ll b/test/Transforms/Inline/prof-update.ll
index 3fefa1c56cea..4a4471e8e17a 100644
--- a/test/Transforms/Inline/prof-update.ll
+++ b/test/Transforms/Inline/prof-update.ll
@@ -6,21 +6,21 @@ declare void @ext1();
@func = global void ()* null
; CHECK: define void @callee(i32 %n) !prof ![[ENTRY_COUNT:[0-9]*]]
-define void @callee(i32 %n) !prof !1 {
+define void @callee(i32 %n) !prof !15 {
%cond = icmp sle i32 %n, 10
br i1 %cond, label %cond_true, label %cond_false
cond_true:
; ext1 is optimized away, thus not updated.
; CHECK: call void @ext1(), !prof ![[COUNT_CALLEE1:[0-9]*]]
- call void @ext1(), !prof !2
+ call void @ext1(), !prof !16
ret void
cond_false:
; ext is cloned and updated.
; CHECK: call void @ext(), !prof ![[COUNT_CALLEE:[0-9]*]]
- call void @ext(), !prof !2
+ call void @ext(), !prof !16
%f = load void ()*, void ()** @func
; CHECK: call void %f(), !prof ![[COUNT_IND_CALLEE:[0-9]*]]
- call void %f(), !prof !4
+ call void %f(), !prof !18
ret void
}
@@ -28,16 +28,29 @@ cond_false:
define void @caller() {
; CHECK: call void @ext(), !prof ![[COUNT_CALLER:[0-9]*]]
; CHECK: call void %f.i(), !prof ![[COUNT_IND_CALLER:[0-9]*]]
- call void @callee(i32 15), !prof !3
+ call void @callee(i32 15), !prof !17
ret void
}
-!llvm.module.flags = !{!0}
-!0 = !{i32 1, !"MaxFunctionCount", i32 2000}
-!1 = !{!"function_entry_count", i64 1000}
-!2 = !{!"branch_weights", i64 2000}
-!3 = !{!"branch_weights", i64 400}
-!4 = !{!"VP", i32 0, i64 140, i64 111, i64 80, i64 222, i64 40, i64 333, i64 20}
+!llvm.module.flags = !{!1}
+!1 = !{i32 1, !"ProfileSummary", !2}
+!2 = !{!3, !4, !5, !6, !7, !8, !9, !10}
+!3 = !{!"ProfileFormat", !"SampleProfile"}
+!4 = !{!"TotalCount", i64 10000}
+!5 = !{!"MaxCount", i64 10}
+!6 = !{!"MaxInternalCount", i64 1}
+!7 = !{!"MaxFunctionCount", i64 2000}
+!8 = !{!"NumCounts", i64 2}
+!9 = !{!"NumFunctions", i64 2}
+!10 = !{!"DetailedSummary", !11}
+!11 = !{!12, !13, !14}
+!12 = !{i32 10000, i64 100, i32 1}
+!13 = !{i32 999000, i64 100, i32 1}
+!14 = !{i32 999999, i64 1, i32 2}
+!15 = !{!"function_entry_count", i64 1000}
+!16 = !{!"branch_weights", i64 2000}
+!17 = !{!"branch_weights", i64 400}
+!18 = !{!"VP", i32 0, i64 140, i64 111, i64 80, i64 222, i64 40, i64 333, i64 20}
attributes #0 = { alwaysinline }
; CHECK: ![[ENTRY_COUNT]] = !{!"function_entry_count", i64 600}
; CHECK: ![[COUNT_CALLEE1]] = !{!"branch_weights", i64 2000}
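The switch from a bare MaxFunctionCount module flag to full ProfileSummary metadata is what the inliner's profile update now keys off. The arithmetic behind the checked entry count is proportional scaling: the inlined call site carries weight 400 against a callee entry count of 1000, so 600 stays with the out-of-line @callee. A short recap in comment form; only the 600 figure is checked in the hunk shown above, and the other values are what proportional scaling would predict rather than numbers quoted from the file:

; Proportional profile scaling sketch (assumed expectations, except for 600):
;   callee entry count               : 1000   (!15)
;   weight of the inlined call site  :  400   (!17)
;   remaining callee entry count     : 1000 - 400 = 600
;   @ext call weight, callee copy    : 2000 * 600 / 1000 = 1200
;   @ext call weight, caller copy    : 2000 * 400 / 1000 =  800
;   VP counts scale the same way, e.g. 140 * 400 / 1000 = 56 for the
;   indirect call cloned into @caller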