summaryrefslogtreecommitdiff
path: root/test/CodeGen/X86/GlobalISel/memop-scalar.ll
diff options
context:
space:
mode:
Diffstat (limited to 'test/CodeGen/X86/GlobalISel/memop-scalar.ll')
-rw-r--r--test/CodeGen/X86/GlobalISel/memop-scalar.ll146
1 files changed, 146 insertions, 0 deletions
diff --git a/test/CodeGen/X86/GlobalISel/memop-scalar.ll b/test/CodeGen/X86/GlobalISel/memop-scalar.ll
new file mode 100644
index 000000000000..3e45a9c9a49d
--- /dev/null
+++ b/test/CodeGen/X86/GlobalISel/memop-scalar.ll
@@ -0,0 +1,146 @@
+; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
+; RUN: llc -mtriple=x86_64-linux-gnu -global-isel < %s -o - | FileCheck %s --check-prefix=ALL --check-prefix=SSE_FAST
+; RUN: llc -mtriple=x86_64-linux-gnu -regbankselect-greedy -global-isel < %s -o - | FileCheck %s --check-prefix=ALL --check-prefix=SSE_GREEDY
+
+define i8 @test_load_i8(i8 * %p1) {
+; A scalar i8 load should select to a single 8-bit GPR load (movb into %al).
+; ALL-LABEL: test_load_i8:
+; ALL: # BB#0:
+; ALL-NEXT: movb (%rdi), %al
+; ALL-NEXT: retq
+ %r = load i8, i8* %p1
+ ret i8 %r
+}
+
+define i16 @test_load_i16(i16 * %p1) {
+; The i16 load is selected as a zero-extending load into the full 32-bit
+; register (movzwl) rather than a plain 16-bit move.
+; ALL-LABEL: test_load_i16:
+; ALL: # BB#0:
+; ALL-NEXT: movzwl (%rdi), %eax
+; ALL-NEXT: retq
+ %r = load i16, i16* %p1
+ ret i16 %r
+}
+
+define i32 @test_load_i32(i32 * %p1) {
+; A scalar i32 load should select to a single movl.
+; ALL-LABEL: test_load_i32:
+; ALL: # BB#0:
+; ALL-NEXT: movl (%rdi), %eax
+; ALL-NEXT: retq
+ %r = load i32, i32* %p1
+ ret i32 %r
+}
+
+define i64 @test_load_i64(i64 * %p1) {
+; A scalar i64 load should select to a single movq.
+; ALL-LABEL: test_load_i64:
+; ALL: # BB#0:
+; ALL-NEXT: movq (%rdi), %rax
+; ALL-NEXT: retq
+ %r = load i64, i64* %p1
+ ret i64 %r
+}
+
+define float @test_load_float(float * %p1) {
+; FIX: the previous checks here used the SSE and ALL_AVX prefixes, but no RUN
+; line in this file defines those prefixes (only ALL, SSE_FAST and SSE_GREEDY
+; exist), so this function was not actually being verified. Rewritten against
+; the live prefixes, mirroring test_store_float below: fast regbankselect
+; routes the float through a GPR (movl + movd), while greedy keeps it on the
+; vector bank and loads straight into xmm0 with movss.
+; SSE_FAST-LABEL: test_load_float:
+; SSE_FAST: # BB#0:
+; SSE_FAST-NEXT: movl (%rdi), %eax
+; SSE_FAST-NEXT: movd %eax, %xmm0
+; SSE_FAST-NEXT: retq
+;
+; SSE_GREEDY-LABEL: test_load_float:
+; SSE_GREEDY: # BB#0:
+; SSE_GREEDY-NEXT: movss (%rdi), %xmm0
+; SSE_GREEDY-NEXT: retq
+ %r = load float, float* %p1
+ ret float %r
+}
+
+define double @test_load_double(double * %p1) {
+; FIX: the previous checks here used the SSE and ALL_AVX prefixes, which no
+; RUN line in this file defines (only ALL, SSE_FAST and SSE_GREEDY exist), so
+; this function was not actually being verified. Rewritten against the live
+; prefixes, mirroring test_store_double below: fast regbankselect routes the
+; double through a GPR (movq + movq), while greedy keeps it on the vector bank
+; and loads straight into xmm0 with movsd.
+; SSE_FAST-LABEL: test_load_double:
+; SSE_FAST: # BB#0:
+; SSE_FAST-NEXT: movq (%rdi), %rax
+; SSE_FAST-NEXT: movq %rax, %xmm0
+; SSE_FAST-NEXT: retq
+;
+; SSE_GREEDY-LABEL: test_load_double:
+; SSE_GREEDY: # BB#0:
+; SSE_GREEDY-NEXT: movsd (%rdi), %xmm0
+; SSE_GREEDY-NEXT: retq
+ %r = load double, double* %p1
+ ret double %r
+}
+
+define i32 * @test_store_i32(i32 %val, i32 * %p1) {
+; The i32 store goes straight from the argument GPR (%edi) to memory; the
+; function also returns the pointer, checked as the %rsi -> %rax copy.
+; ALL-LABEL: test_store_i32:
+; ALL: # BB#0:
+; ALL-NEXT: movl %edi, (%rsi)
+; ALL-NEXT: movq %rsi, %rax
+; ALL-NEXT: retq
+ store i32 %val, i32* %p1
+ ret i32 * %p1;
+}
+
+define i64 * @test_store_i64(i64 %val, i64 * %p1) {
+; The i64 store goes straight from the argument GPR (%rdi) to memory; the
+; function also returns the pointer, checked as the %rsi -> %rax copy.
+; ALL-LABEL: test_store_i64:
+; ALL: # BB#0:
+; ALL-NEXT: movq %rdi, (%rsi)
+; ALL-NEXT: movq %rsi, %rax
+; ALL-NEXT: retq
+ store i64 %val, i64* %p1
+ ret i64 * %p1;
+}
+
+define float * @test_store_float(float %val, float * %p1) {
+; The two regbankselect modes differ here: fast moves the float through a GPR
+; (movd to %eax, then movl to memory), while greedy assigns the value to the
+; vector bank and stores directly from xmm0 with movss.
+;
+; SSE_FAST-LABEL: test_store_float:
+; SSE_FAST: # BB#0:
+; SSE_FAST-NEXT: movd %xmm0, %eax
+; SSE_FAST-NEXT: movl %eax, (%rdi)
+; SSE_FAST-NEXT: movq %rdi, %rax
+; SSE_FAST-NEXT: retq
+;
+; SSE_GREEDY-LABEL: test_store_float:
+; SSE_GREEDY: # BB#0:
+; SSE_GREEDY-NEXT: movss %xmm0, (%rdi)
+; SSE_GREEDY-NEXT: movq %rdi, %rax
+; SSE_GREEDY-NEXT: retq
+ store float %val, float* %p1
+ ret float * %p1;
+}
+
+define double * @test_store_double(double %val, double * %p1) {
+; Same fast/greedy split as test_store_float, but 64-bit: fast moves the
+; double through a GPR (movq to %rax, then movq to memory), while greedy
+; stores directly from xmm0 with movsd.
+;
+; SSE_FAST-LABEL: test_store_double:
+; SSE_FAST: # BB#0:
+; SSE_FAST-NEXT: movq %xmm0, %rax
+; SSE_FAST-NEXT: movq %rax, (%rdi)
+; SSE_FAST-NEXT: movq %rdi, %rax
+; SSE_FAST-NEXT: retq
+;
+; SSE_GREEDY-LABEL: test_store_double:
+; SSE_GREEDY: # BB#0:
+; SSE_GREEDY-NEXT: movsd %xmm0, (%rdi)
+; SSE_GREEDY-NEXT: movq %rdi, %rax
+; SSE_GREEDY-NEXT: retq
+;
+ store double %val, double* %p1
+ ret double * %p1;
+}
+
+define i32* @test_load_ptr(i32** %ptr1) {
+; Pointer loads are selected as plain 64-bit GPR loads (movq).
+; ALL-LABEL: test_load_ptr:
+; ALL: # BB#0:
+; ALL-NEXT: movq (%rdi), %rax
+; ALL-NEXT: retq
+ %p = load i32*, i32** %ptr1
+ ret i32* %p
+}
+
+define void @test_store_ptr(i32** %ptr1, i32* %a) {
+; Pointer stores are selected as plain 64-bit GPR stores (movq).
+; ALL-LABEL: test_store_ptr:
+; ALL: # BB#0:
+; ALL-NEXT: movq %rsi, (%rdi)
+; ALL-NEXT: retq
+ store i32* %a, i32** %ptr1
+ ret void
+}