Diffstat (limited to 'test/CodeGen/builtins-ppc-vsx.c')
-rw-r--r--   test/CodeGen/builtins-ppc-vsx.c   177
1 file changed, 177 insertions, 0 deletions
diff --git a/test/CodeGen/builtins-ppc-vsx.c b/test/CodeGen/builtins-ppc-vsx.c
index 58a8cc32dce30..631cb6ccafbed 100644
--- a/test/CodeGen/builtins-ppc-vsx.c
+++ b/test/CodeGen/builtins-ppc-vsx.c
@@ -7,6 +7,7 @@ vector float vf = { -1.5, 2.5, -3.5, 4.5 };
vector double vd = { 3.5, -7.5 };
vector signed int vsi = { -1, 2, -3, 4 };
vector unsigned int vui = { 0, 1, 2, 3 };
+vector bool long long vbll = { 1, 0 };
vector signed long long vsll = { 255LL, -937LL };
vector unsigned long long vull = { 1447LL, 2894LL };
double d = 23.4;
@@ -15,6 +16,7 @@ vector float res_vf;
vector double res_vd;
vector signed int res_vsi;
vector unsigned int res_vui;
+vector bool long long res_vbll;
vector signed long long res_vsll;
vector unsigned long long res_vull;
double res_d;
@@ -113,4 +115,179 @@ void test1() {
vec_vsx_st(vd, 0, &res_vd);
// CHECK: @llvm.ppc.vsx.stxvd2x
+
+ /* vec_and */
+ res_vsll = vec_and(vsll, vsll);
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_and(vbll, vsll);
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_and(vsll, vbll);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_and(vull, vull);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_and(vbll, vull);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_and(vull, vbll);
+// CHECK: and <2 x i64>
+
+ res_vbll = vec_and(vbll, vbll);
+// CHECK: and <2 x i64>
+
+ /* vec_vand */
+ res_vsll = vec_vand(vsll, vsll);
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_vand(vbll, vsll);
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_vand(vsll, vbll);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_vand(vull, vull);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_vand(vbll, vull);
+// CHECK: and <2 x i64>
+
+ res_vull = vec_vand(vull, vbll);
+// CHECK: and <2 x i64>
+
+ res_vbll = vec_vand(vbll, vbll);
+// CHECK: and <2 x i64>
+
+ /* vec_andc */
+ res_vsll = vec_andc(vsll, vsll);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_andc(vbll, vsll);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vsll = vec_andc(vsll, vbll);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vull = vec_andc(vull, vull);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vull = vec_andc(vbll, vull);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vull = vec_andc(vull, vbll);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ res_vbll = vec_andc(vbll, vbll);
+// CHECK: xor <2 x i64>
+// CHECK: and <2 x i64>
+
+ /* vec_nor */
+ res_vsll = vec_nor(vsll, vsll);
+// CHECK: or <2 x i64>
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_nor(vull, vull);
+// CHECK: or <2 x i64>
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_nor(vbll, vbll);
+// CHECK: or <2 x i64>
+// CHECK: xor <2 x i64>
+
+ /* vec_or */
+ res_vsll = vec_or(vsll, vsll);
+// CHECK: or <2 x i64>
+
+ res_vsll = vec_or(vbll, vsll);
+// CHECK: or <2 x i64>
+
+ res_vsll = vec_or(vsll, vbll);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_or(vull, vull);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_or(vbll, vull);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_or(vull, vbll);
+// CHECK: or <2 x i64>
+
+ res_vbll = vec_or(vbll, vbll);
+// CHECK: or <2 x i64>
+
+ /* vec_vor */
+ res_vsll = vec_vor(vsll, vsll);
+// CHECK: or <2 x i64>
+
+ res_vsll = vec_vor(vbll, vsll);
+// CHECK: or <2 x i64>
+
+ res_vsll = vec_vor(vsll, vbll);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_vor(vull, vull);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_vor(vbll, vull);
+// CHECK: or <2 x i64>
+
+ res_vull = vec_vor(vull, vbll);
+// CHECK: or <2 x i64>
+
+ res_vbll = vec_vor(vbll, vbll);
+// CHECK: or <2 x i64>
+
+ /* vec_xor */
+ res_vsll = vec_xor(vsll, vsll);
+// CHECK: xor <2 x i64>
+
+ res_vsll = vec_xor(vbll, vsll);
+// CHECK: xor <2 x i64>
+
+ res_vsll = vec_xor(vsll, vbll);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_xor(vull, vull);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_xor(vbll, vull);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_xor(vull, vbll);
+// CHECK: xor <2 x i64>
+
+ res_vbll = vec_xor(vbll, vbll);
+// CHECK: xor <2 x i64>
+
+ /* vec_vxor */
+ res_vsll = vec_vxor(vsll, vsll);
+// CHECK: xor <2 x i64>
+
+ res_vsll = vec_vxor(vbll, vsll);
+// CHECK: xor <2 x i64>
+
+ res_vsll = vec_vxor(vsll, vbll);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_vxor(vull, vull);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_vxor(vbll, vull);
+// CHECK: xor <2 x i64>
+
+ res_vull = vec_vxor(vull, vbll);
+// CHECK: xor <2 x i64>
+
+ res_vbll = vec_vxor(vbll, vbll);
+// CHECK: xor <2 x i64>
+
}
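
Note: the overloads exercised above accept a vector bool long long operand in either position and lower to a single 128-bit logical operation on <2 x i64>, which is what the CHECK lines match. A minimal standalone sketch of the same pattern follows; the file name, compile command, and helper function are illustrative only and not part of this commit. Assuming a Clang/LLVM POWER toolchain with VSX enabled, it could be built with something like: clang -target powerpc64-unknown-linux-gnu -mvsx -S -emit-llvm logical_bool_ll.c

// logical_bool_ll.c -- illustrative sketch, not part of the test suite.
// Mirrors the vec_and/vec_or/vec_xor overloads exercised by the diff above.
#include <altivec.h>

vector bool long long vbll = { 1, 0 };
vector signed long long vsll = { 255LL, -937LL };

vector signed long long demo(void) {
  // Mixing a bool and a signed operand yields the signed element type,
  // just as the test assigns these results to res_vsll.
  vector signed long long a = vec_and(vbll, vsll);  // and <2 x i64>
  vector signed long long o = vec_or(vsll, vbll);   // or  <2 x i64>
  return vec_xor(a, o);                             // xor <2 x i64>
}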