Diffstat (limited to 'test/CodeGen/X86/widen_conv-1.ll')
-rw-r--r-- | test/CodeGen/X86/widen_conv-1.ll | 12
1 file changed, 8 insertions, 4 deletions
diff --git a/test/CodeGen/X86/widen_conv-1.ll b/test/CodeGen/X86/widen_conv-1.ll
index a672e84fcde40..c548fc2c77e4a 100644
--- a/test/CodeGen/X86/widen_conv-1.ll
+++ b/test/CodeGen/X86/widen_conv-1.ll
@@ -35,7 +35,8 @@ define void @convert_v3i32_to_v3i8(<3 x i8>* %dst.addr, <3 x i32>* %src.addr) no
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
 ; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
 ; X86-NEXT: movdqa (%ecx), %xmm0
-; X86-NEXT: paddd {{\.LCPI.*}}, %xmm0
+; X86-NEXT: pcmpeqd %xmm1, %xmm1
+; X86-NEXT: psubd %xmm1, %xmm0
 ; X86-NEXT: pextrb $8, %xmm0, 2(%eax)
 ; X86-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u]
 ; X86-NEXT: pextrw $0, %xmm0, (%eax)
@@ -45,7 +46,8 @@ define void @convert_v3i32_to_v3i8(<3 x i8>* %dst.addr, <3 x i32>* %src.addr) no
 ; X64-LABEL: convert_v3i32_to_v3i8:
 ; X64: # BB#0: # %entry
 ; X64-NEXT: movdqa (%rsi), %xmm0
-; X64-NEXT: paddd {{.*}}(%rip), %xmm0
+; X64-NEXT: pcmpeqd %xmm1, %xmm1
+; X64-NEXT: psubd %xmm1, %xmm0
 ; X64-NEXT: pextrb $8, %xmm0, 2(%rdi)
 ; X64-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,4,8,12,u,u,u,u,u,u,u,u,u,u,u,u]
 ; X64-NEXT: pextrw $0, %xmm0, (%rdi)
@@ -70,7 +72,8 @@ define void @convert_v5i16_to_v5i8(<5 x i8>* %dst.addr, <5 x i16>* %src.addr) no
 ; X86-NEXT: movl 8(%ebp), %eax
 ; X86-NEXT: movl 12(%ebp), %ecx
 ; X86-NEXT: movdqa (%ecx), %xmm0
-; X86-NEXT: paddw {{\.LCPI.*}}, %xmm0
+; X86-NEXT: pcmpeqd %xmm1, %xmm1
+; X86-NEXT: psubw %xmm1, %xmm0
 ; X86-NEXT: pextrb $8, %xmm0, 4(%eax)
 ; X86-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
 ; X86-NEXT: movd %xmm0, (%eax)
@@ -81,7 +84,8 @@ define void @convert_v5i16_to_v5i8(<5 x i8>* %dst.addr, <5 x i16>* %src.addr) no
 ; X64-LABEL: convert_v5i16_to_v5i8:
 ; X64: # BB#0: # %entry
 ; X64-NEXT: movdqa (%rsi), %xmm0
-; X64-NEXT: paddw {{.*}}(%rip), %xmm0
+; X64-NEXT: pcmpeqd %xmm1, %xmm1
+; X64-NEXT: psubw %xmm1, %xmm0
 ; X64-NEXT: pextrb $8, %xmm0, 4(%rdi)
 ; X64-NEXT: pshufb {{.*#+}} xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
 ; X64-NEXT: movd %xmm0, (%rdi)
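
Note on the updated checks: the new codegen drops the constant-pool operand of paddd/paddw (the .LCPI / (%rip) load, apparently a splat of 1) and instead materializes an all-ones register with pcmpeqd %xmm1, %xmm1, then subtracts it, since x - (-1) = x + 1. The following C sketch is not part of the test file; it only illustrates, using SSE2 intrinsics, that the old and new instruction sequences compute the same per-lane result.

/* Illustration only: pcmpeqd of a register with itself yields 0xFFFFFFFF
 * in every lane (-1), so subtracting it adds 1 per lane without needing
 * a constant loaded from memory. */
#include <emmintrin.h>
#include <stdio.h>

int main(void) {
    __m128i v = _mm_set_epi32(4, 3, 2, 1);

    /* Old lowering: paddd with a +1 splat loaded from the constant pool. */
    __m128i add_one  = _mm_add_epi32(v, _mm_set1_epi32(1));

    /* New lowering: pcmpeqd to get all-ones, then psubd. */
    __m128i all_ones = _mm_cmpeq_epi32(v, v);       /* every lane = -1      */
    __m128i sub_neg1 = _mm_sub_epi32(v, all_ones);  /* v - (-1) == v + 1    */

    int a[4], b[4];
    _mm_storeu_si128((__m128i *)a, add_one);
    _mm_storeu_si128((__m128i *)b, sub_neg1);
    for (int i = 0; i < 4; i++)
        printf("%d %d\n", a[i], b[i]);              /* pairs are identical  */
    return 0;
}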