Diffstat (limited to 'contrib/llvm-project/clang/lib/Sema/SemaX86.cpp')
-rw-r--r--  contrib/llvm-project/clang/lib/Sema/SemaX86.cpp  972
1 file changed, 972 insertions, 0 deletions
diff --git a/contrib/llvm-project/clang/lib/Sema/SemaX86.cpp b/contrib/llvm-project/clang/lib/Sema/SemaX86.cpp
new file mode 100644
index 000000000000..be26454ce909
--- /dev/null
+++ b/contrib/llvm-project/clang/lib/Sema/SemaX86.cpp
@@ -0,0 +1,972 @@
+//===------ SemaX86.cpp ---------- X86 target-specific routines -----------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements semantic analysis functions specific to X86.
+//
+//===----------------------------------------------------------------------===//
+
+#include "clang/Sema/SemaX86.h"
+#include "clang/Basic/DiagnosticSema.h"
+#include "clang/Basic/TargetBuiltins.h"
+#include "clang/Sema/Attr.h"
+#include "clang/Sema/ParsedAttr.h"
+#include "clang/Sema/Sema.h"
+#include "llvm/ADT/APSInt.h"
+#include "llvm/TargetParser/Triple.h"
+#include <bitset>
+
+namespace clang {
+
+SemaX86::SemaX86(Sema &S) : SemaBase(S) {}
+
+// Check if the rounding mode is legal.
+bool SemaX86::CheckBuiltinRoundingOrSAE(unsigned BuiltinID, CallExpr *TheCall) {
+ // Indicates if this instruction has rounding control or just SAE.
+ bool HasRC = false;
+
+ unsigned ArgNum = 0;
+ switch (BuiltinID) {
+ default:
+ return false;
+ case X86::BI__builtin_ia32_vcvttsd2si32:
+ case X86::BI__builtin_ia32_vcvttsd2si64:
+ case X86::BI__builtin_ia32_vcvttsd2usi32:
+ case X86::BI__builtin_ia32_vcvttsd2usi64:
+ case X86::BI__builtin_ia32_vcvttss2si32:
+ case X86::BI__builtin_ia32_vcvttss2si64:
+ case X86::BI__builtin_ia32_vcvttss2usi32:
+ case X86::BI__builtin_ia32_vcvttss2usi64:
+ case X86::BI__builtin_ia32_vcvttsh2si32:
+ case X86::BI__builtin_ia32_vcvttsh2si64:
+ case X86::BI__builtin_ia32_vcvttsh2usi32:
+ case X86::BI__builtin_ia32_vcvttsh2usi64:
+ ArgNum = 1;
+ break;
+ case X86::BI__builtin_ia32_maxpd512:
+ case X86::BI__builtin_ia32_maxps512:
+ case X86::BI__builtin_ia32_minpd512:
+ case X86::BI__builtin_ia32_minps512:
+ case X86::BI__builtin_ia32_maxph512:
+ case X86::BI__builtin_ia32_minph512:
+ ArgNum = 2;
+ break;
+ case X86::BI__builtin_ia32_vcvtph2pd512_mask:
+ case X86::BI__builtin_ia32_vcvtph2psx512_mask:
+ case X86::BI__builtin_ia32_cvtps2pd512_mask:
+ case X86::BI__builtin_ia32_cvttpd2dq512_mask:
+ case X86::BI__builtin_ia32_cvttpd2qq512_mask:
+ case X86::BI__builtin_ia32_cvttpd2udq512_mask:
+ case X86::BI__builtin_ia32_cvttpd2uqq512_mask:
+ case X86::BI__builtin_ia32_cvttps2dq512_mask:
+ case X86::BI__builtin_ia32_cvttps2qq512_mask:
+ case X86::BI__builtin_ia32_cvttps2udq512_mask:
+ case X86::BI__builtin_ia32_cvttps2uqq512_mask:
+ case X86::BI__builtin_ia32_vcvttph2w512_mask:
+ case X86::BI__builtin_ia32_vcvttph2uw512_mask:
+ case X86::BI__builtin_ia32_vcvttph2dq512_mask:
+ case X86::BI__builtin_ia32_vcvttph2udq512_mask:
+ case X86::BI__builtin_ia32_vcvttph2qq512_mask:
+ case X86::BI__builtin_ia32_vcvttph2uqq512_mask:
+ case X86::BI__builtin_ia32_getexppd512_mask:
+ case X86::BI__builtin_ia32_getexpps512_mask:
+ case X86::BI__builtin_ia32_getexpph512_mask:
+ case X86::BI__builtin_ia32_vcomisd:
+ case X86::BI__builtin_ia32_vcomiss:
+ case X86::BI__builtin_ia32_vcomish:
+ case X86::BI__builtin_ia32_vcvtph2ps512_mask:
+ ArgNum = 3;
+ break;
+ case X86::BI__builtin_ia32_cmppd512_mask:
+ case X86::BI__builtin_ia32_cmpps512_mask:
+ case X86::BI__builtin_ia32_cmpsd_mask:
+ case X86::BI__builtin_ia32_cmpss_mask:
+ case X86::BI__builtin_ia32_cmpsh_mask:
+ case X86::BI__builtin_ia32_vcvtsh2sd_round_mask:
+ case X86::BI__builtin_ia32_vcvtsh2ss_round_mask:
+ case X86::BI__builtin_ia32_cvtss2sd_round_mask:
+ case X86::BI__builtin_ia32_getexpsd128_round_mask:
+ case X86::BI__builtin_ia32_getexpss128_round_mask:
+ case X86::BI__builtin_ia32_getexpsh128_round_mask:
+ case X86::BI__builtin_ia32_getmantpd512_mask:
+ case X86::BI__builtin_ia32_getmantps512_mask:
+ case X86::BI__builtin_ia32_getmantph512_mask:
+ case X86::BI__builtin_ia32_maxsd_round_mask:
+ case X86::BI__builtin_ia32_maxss_round_mask:
+ case X86::BI__builtin_ia32_maxsh_round_mask:
+ case X86::BI__builtin_ia32_minsd_round_mask:
+ case X86::BI__builtin_ia32_minss_round_mask:
+ case X86::BI__builtin_ia32_minsh_round_mask:
+ case X86::BI__builtin_ia32_reducepd512_mask:
+ case X86::BI__builtin_ia32_reduceps512_mask:
+ case X86::BI__builtin_ia32_reduceph512_mask:
+ case X86::BI__builtin_ia32_rndscalepd_mask:
+ case X86::BI__builtin_ia32_rndscaleps_mask:
+ case X86::BI__builtin_ia32_rndscaleph_mask:
+ ArgNum = 4;
+ break;
+ case X86::BI__builtin_ia32_fixupimmpd512_mask:
+ case X86::BI__builtin_ia32_fixupimmpd512_maskz:
+ case X86::BI__builtin_ia32_fixupimmps512_mask:
+ case X86::BI__builtin_ia32_fixupimmps512_maskz:
+ case X86::BI__builtin_ia32_fixupimmsd_mask:
+ case X86::BI__builtin_ia32_fixupimmsd_maskz:
+ case X86::BI__builtin_ia32_fixupimmss_mask:
+ case X86::BI__builtin_ia32_fixupimmss_maskz:
+ case X86::BI__builtin_ia32_getmantsd_round_mask:
+ case X86::BI__builtin_ia32_getmantss_round_mask:
+ case X86::BI__builtin_ia32_getmantsh_round_mask:
+ case X86::BI__builtin_ia32_rangepd512_mask:
+ case X86::BI__builtin_ia32_rangeps512_mask:
+ case X86::BI__builtin_ia32_rangesd128_round_mask:
+ case X86::BI__builtin_ia32_rangess128_round_mask:
+ case X86::BI__builtin_ia32_reducesd_mask:
+ case X86::BI__builtin_ia32_reducess_mask:
+ case X86::BI__builtin_ia32_reducesh_mask:
+ case X86::BI__builtin_ia32_rndscalesd_round_mask:
+ case X86::BI__builtin_ia32_rndscaless_round_mask:
+ case X86::BI__builtin_ia32_rndscalesh_round_mask:
+ ArgNum = 5;
+ break;
+ case X86::BI__builtin_ia32_vcvtsd2si64:
+ case X86::BI__builtin_ia32_vcvtsd2si32:
+ case X86::BI__builtin_ia32_vcvtsd2usi32:
+ case X86::BI__builtin_ia32_vcvtsd2usi64:
+ case X86::BI__builtin_ia32_vcvtss2si32:
+ case X86::BI__builtin_ia32_vcvtss2si64:
+ case X86::BI__builtin_ia32_vcvtss2usi32:
+ case X86::BI__builtin_ia32_vcvtss2usi64:
+ case X86::BI__builtin_ia32_vcvtsh2si32:
+ case X86::BI__builtin_ia32_vcvtsh2si64:
+ case X86::BI__builtin_ia32_vcvtsh2usi32:
+ case X86::BI__builtin_ia32_vcvtsh2usi64:
+ case X86::BI__builtin_ia32_sqrtpd512:
+ case X86::BI__builtin_ia32_sqrtps512:
+ case X86::BI__builtin_ia32_sqrtph512:
+ ArgNum = 1;
+ HasRC = true;
+ break;
+ case X86::BI__builtin_ia32_addph512:
+ case X86::BI__builtin_ia32_divph512:
+ case X86::BI__builtin_ia32_mulph512:
+ case X86::BI__builtin_ia32_subph512:
+ case X86::BI__builtin_ia32_addpd512:
+ case X86::BI__builtin_ia32_addps512:
+ case X86::BI__builtin_ia32_divpd512:
+ case X86::BI__builtin_ia32_divps512:
+ case X86::BI__builtin_ia32_mulpd512:
+ case X86::BI__builtin_ia32_mulps512:
+ case X86::BI__builtin_ia32_subpd512:
+ case X86::BI__builtin_ia32_subps512:
+ case X86::BI__builtin_ia32_cvtsi2sd64:
+ case X86::BI__builtin_ia32_cvtsi2ss32:
+ case X86::BI__builtin_ia32_cvtsi2ss64:
+ case X86::BI__builtin_ia32_cvtusi2sd64:
+ case X86::BI__builtin_ia32_cvtusi2ss32:
+ case X86::BI__builtin_ia32_cvtusi2ss64:
+ case X86::BI__builtin_ia32_vcvtusi2sh:
+ case X86::BI__builtin_ia32_vcvtusi642sh:
+ case X86::BI__builtin_ia32_vcvtsi2sh:
+ case X86::BI__builtin_ia32_vcvtsi642sh:
+ ArgNum = 2;
+ HasRC = true;
+ break;
+ case X86::BI__builtin_ia32_cvtdq2ps512_mask:
+ case X86::BI__builtin_ia32_cvtudq2ps512_mask:
+ case X86::BI__builtin_ia32_vcvtpd2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtps2phx512_mask:
+ case X86::BI__builtin_ia32_cvtpd2ps512_mask:
+ case X86::BI__builtin_ia32_cvtpd2dq512_mask:
+ case X86::BI__builtin_ia32_cvtpd2qq512_mask:
+ case X86::BI__builtin_ia32_cvtpd2udq512_mask:
+ case X86::BI__builtin_ia32_cvtpd2uqq512_mask:
+ case X86::BI__builtin_ia32_cvtps2dq512_mask:
+ case X86::BI__builtin_ia32_cvtps2qq512_mask:
+ case X86::BI__builtin_ia32_cvtps2udq512_mask:
+ case X86::BI__builtin_ia32_cvtps2uqq512_mask:
+ case X86::BI__builtin_ia32_cvtqq2pd512_mask:
+ case X86::BI__builtin_ia32_cvtqq2ps512_mask:
+ case X86::BI__builtin_ia32_cvtuqq2pd512_mask:
+ case X86::BI__builtin_ia32_cvtuqq2ps512_mask:
+ case X86::BI__builtin_ia32_vcvtdq2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtudq2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtw2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtuw2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtph2w512_mask:
+ case X86::BI__builtin_ia32_vcvtph2uw512_mask:
+ case X86::BI__builtin_ia32_vcvtph2dq512_mask:
+ case X86::BI__builtin_ia32_vcvtph2udq512_mask:
+ case X86::BI__builtin_ia32_vcvtph2qq512_mask:
+ case X86::BI__builtin_ia32_vcvtph2uqq512_mask:
+ case X86::BI__builtin_ia32_vcvtqq2ph512_mask:
+ case X86::BI__builtin_ia32_vcvtuqq2ph512_mask:
+ ArgNum = 3;
+ HasRC = true;
+ break;
+ case X86::BI__builtin_ia32_addsh_round_mask:
+ case X86::BI__builtin_ia32_addss_round_mask:
+ case X86::BI__builtin_ia32_addsd_round_mask:
+ case X86::BI__builtin_ia32_divsh_round_mask:
+ case X86::BI__builtin_ia32_divss_round_mask:
+ case X86::BI__builtin_ia32_divsd_round_mask:
+ case X86::BI__builtin_ia32_mulsh_round_mask:
+ case X86::BI__builtin_ia32_mulss_round_mask:
+ case X86::BI__builtin_ia32_mulsd_round_mask:
+ case X86::BI__builtin_ia32_subsh_round_mask:
+ case X86::BI__builtin_ia32_subss_round_mask:
+ case X86::BI__builtin_ia32_subsd_round_mask:
+ case X86::BI__builtin_ia32_scalefph512_mask:
+ case X86::BI__builtin_ia32_scalefpd512_mask:
+ case X86::BI__builtin_ia32_scalefps512_mask:
+ case X86::BI__builtin_ia32_scalefsd_round_mask:
+ case X86::BI__builtin_ia32_scalefss_round_mask:
+ case X86::BI__builtin_ia32_scalefsh_round_mask:
+ case X86::BI__builtin_ia32_cvtsd2ss_round_mask:
+ case X86::BI__builtin_ia32_vcvtss2sh_round_mask:
+ case X86::BI__builtin_ia32_vcvtsd2sh_round_mask:
+ case X86::BI__builtin_ia32_sqrtsd_round_mask:
+ case X86::BI__builtin_ia32_sqrtss_round_mask:
+ case X86::BI__builtin_ia32_sqrtsh_round_mask:
+ case X86::BI__builtin_ia32_vfmaddsd3_mask:
+ case X86::BI__builtin_ia32_vfmaddsd3_maskz:
+ case X86::BI__builtin_ia32_vfmaddsd3_mask3:
+ case X86::BI__builtin_ia32_vfmaddss3_mask:
+ case X86::BI__builtin_ia32_vfmaddss3_maskz:
+ case X86::BI__builtin_ia32_vfmaddss3_mask3:
+ case X86::BI__builtin_ia32_vfmaddsh3_mask:
+ case X86::BI__builtin_ia32_vfmaddsh3_maskz:
+ case X86::BI__builtin_ia32_vfmaddsh3_mask3:
+ case X86::BI__builtin_ia32_vfmaddpd512_mask:
+ case X86::BI__builtin_ia32_vfmaddpd512_maskz:
+ case X86::BI__builtin_ia32_vfmaddpd512_mask3:
+ case X86::BI__builtin_ia32_vfmsubpd512_mask3:
+ case X86::BI__builtin_ia32_vfmaddps512_mask:
+ case X86::BI__builtin_ia32_vfmaddps512_maskz:
+ case X86::BI__builtin_ia32_vfmaddps512_mask3:
+ case X86::BI__builtin_ia32_vfmsubps512_mask3:
+ case X86::BI__builtin_ia32_vfmaddph512_mask:
+ case X86::BI__builtin_ia32_vfmaddph512_maskz:
+ case X86::BI__builtin_ia32_vfmaddph512_mask3:
+ case X86::BI__builtin_ia32_vfmsubph512_mask3:
+ case X86::BI__builtin_ia32_vfmaddsubpd512_mask:
+ case X86::BI__builtin_ia32_vfmaddsubpd512_maskz:
+ case X86::BI__builtin_ia32_vfmaddsubpd512_mask3:
+ case X86::BI__builtin_ia32_vfmsubaddpd512_mask3:
+ case X86::BI__builtin_ia32_vfmaddsubps512_mask:
+ case X86::BI__builtin_ia32_vfmaddsubps512_maskz:
+ case X86::BI__builtin_ia32_vfmaddsubps512_mask3:
+ case X86::BI__builtin_ia32_vfmsubaddps512_mask3:
+ case X86::BI__builtin_ia32_vfmaddsubph512_mask:
+ case X86::BI__builtin_ia32_vfmaddsubph512_maskz:
+ case X86::BI__builtin_ia32_vfmaddsubph512_mask3:
+ case X86::BI__builtin_ia32_vfmsubaddph512_mask3:
+ case X86::BI__builtin_ia32_vfmaddcsh_mask:
+ case X86::BI__builtin_ia32_vfmaddcsh_round_mask:
+ case X86::BI__builtin_ia32_vfmaddcsh_round_mask3:
+ case X86::BI__builtin_ia32_vfmaddcph512_mask:
+ case X86::BI__builtin_ia32_vfmaddcph512_maskz:
+ case X86::BI__builtin_ia32_vfmaddcph512_mask3:
+ case X86::BI__builtin_ia32_vfcmaddcsh_mask:
+ case X86::BI__builtin_ia32_vfcmaddcsh_round_mask:
+ case X86::BI__builtin_ia32_vfcmaddcsh_round_mask3:
+ case X86::BI__builtin_ia32_vfcmaddcph512_mask:
+ case X86::BI__builtin_ia32_vfcmaddcph512_maskz:
+ case X86::BI__builtin_ia32_vfcmaddcph512_mask3:
+ case X86::BI__builtin_ia32_vfmulcsh_mask:
+ case X86::BI__builtin_ia32_vfmulcph512_mask:
+ case X86::BI__builtin_ia32_vfcmulcsh_mask:
+ case X86::BI__builtin_ia32_vfcmulcph512_mask:
+ ArgNum = 4;
+ HasRC = true;
+ break;
+ }
+
+ llvm::APSInt Result;
+
+ // We can't check the value of a dependent argument.
+ Expr *Arg = TheCall->getArg(ArgNum);
+ if (Arg->isTypeDependent() || Arg->isValueDependent())
+ return false;
+
+ // Check constant-ness first.
+ if (SemaRef.BuiltinConstantArg(TheCall, ArgNum, Result))
+ return true;
+
+  // Make sure the rounding mode is either ROUND_CUR_DIRECTION or has the
+  // ROUND_NO_EXC bit set. If the intrinsic has rounding control (bits 1:0),
+  // make sure it is only combined with ROUND_NO_EXC. If the intrinsic does
+  // not have rounding control, allow ROUND_NO_EXC and ROUND_CUR_DIRECTION
+  // together.
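+  // For example (illustrative, assuming the standard _MM_FROUND_* encodings):
+  // _MM_FROUND_CUR_DIRECTION is 4, _MM_FROUND_NO_EXC is 8, and the rounding
+  // modes occupy values 0..3, so an embedded-rounding immediate such as
+  // _MM_FROUND_TO_ZERO | _MM_FROUND_NO_EXC (11) is accepted below, while a
+  // bare _MM_FROUND_TO_ZERO (3) is rejected.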
+ if (Result == 4 /*ROUND_CUR_DIRECTION*/ || Result == 8 /*ROUND_NO_EXC*/ ||
+ (!HasRC && Result == 12 /*ROUND_CUR_DIRECTION|ROUND_NO_EXC*/) ||
+ (HasRC && Result.getZExtValue() >= 8 && Result.getZExtValue() <= 11))
+ return false;
+
+ return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_rounding)
+ << Arg->getSourceRange();
+}
+
+// Check if the gather/scatter scale is legal.
+bool SemaX86::CheckBuiltinGatherScatterScale(unsigned BuiltinID,
+ CallExpr *TheCall) {
+ unsigned ArgNum = 0;
+ switch (BuiltinID) {
+ default:
+ return false;
+ case X86::BI__builtin_ia32_gatherd_pd:
+ case X86::BI__builtin_ia32_gatherd_pd256:
+ case X86::BI__builtin_ia32_gatherq_pd:
+ case X86::BI__builtin_ia32_gatherq_pd256:
+ case X86::BI__builtin_ia32_gatherd_ps:
+ case X86::BI__builtin_ia32_gatherd_ps256:
+ case X86::BI__builtin_ia32_gatherq_ps:
+ case X86::BI__builtin_ia32_gatherq_ps256:
+ case X86::BI__builtin_ia32_gatherd_q:
+ case X86::BI__builtin_ia32_gatherd_q256:
+ case X86::BI__builtin_ia32_gatherq_q:
+ case X86::BI__builtin_ia32_gatherq_q256:
+ case X86::BI__builtin_ia32_gatherd_d:
+ case X86::BI__builtin_ia32_gatherd_d256:
+ case X86::BI__builtin_ia32_gatherq_d:
+ case X86::BI__builtin_ia32_gatherq_d256:
+ case X86::BI__builtin_ia32_gather3div2df:
+ case X86::BI__builtin_ia32_gather3div2di:
+ case X86::BI__builtin_ia32_gather3div4df:
+ case X86::BI__builtin_ia32_gather3div4di:
+ case X86::BI__builtin_ia32_gather3div4sf:
+ case X86::BI__builtin_ia32_gather3div4si:
+ case X86::BI__builtin_ia32_gather3div8sf:
+ case X86::BI__builtin_ia32_gather3div8si:
+ case X86::BI__builtin_ia32_gather3siv2df:
+ case X86::BI__builtin_ia32_gather3siv2di:
+ case X86::BI__builtin_ia32_gather3siv4df:
+ case X86::BI__builtin_ia32_gather3siv4di:
+ case X86::BI__builtin_ia32_gather3siv4sf:
+ case X86::BI__builtin_ia32_gather3siv4si:
+ case X86::BI__builtin_ia32_gather3siv8sf:
+ case X86::BI__builtin_ia32_gather3siv8si:
+ case X86::BI__builtin_ia32_gathersiv8df:
+ case X86::BI__builtin_ia32_gathersiv16sf:
+ case X86::BI__builtin_ia32_gatherdiv8df:
+ case X86::BI__builtin_ia32_gatherdiv16sf:
+ case X86::BI__builtin_ia32_gathersiv8di:
+ case X86::BI__builtin_ia32_gathersiv16si:
+ case X86::BI__builtin_ia32_gatherdiv8di:
+ case X86::BI__builtin_ia32_gatherdiv16si:
+ case X86::BI__builtin_ia32_scatterdiv2df:
+ case X86::BI__builtin_ia32_scatterdiv2di:
+ case X86::BI__builtin_ia32_scatterdiv4df:
+ case X86::BI__builtin_ia32_scatterdiv4di:
+ case X86::BI__builtin_ia32_scatterdiv4sf:
+ case X86::BI__builtin_ia32_scatterdiv4si:
+ case X86::BI__builtin_ia32_scatterdiv8sf:
+ case X86::BI__builtin_ia32_scatterdiv8si:
+ case X86::BI__builtin_ia32_scattersiv2df:
+ case X86::BI__builtin_ia32_scattersiv2di:
+ case X86::BI__builtin_ia32_scattersiv4df:
+ case X86::BI__builtin_ia32_scattersiv4di:
+ case X86::BI__builtin_ia32_scattersiv4sf:
+ case X86::BI__builtin_ia32_scattersiv4si:
+ case X86::BI__builtin_ia32_scattersiv8sf:
+ case X86::BI__builtin_ia32_scattersiv8si:
+ case X86::BI__builtin_ia32_scattersiv8df:
+ case X86::BI__builtin_ia32_scattersiv16sf:
+ case X86::BI__builtin_ia32_scatterdiv8df:
+ case X86::BI__builtin_ia32_scatterdiv16sf:
+ case X86::BI__builtin_ia32_scattersiv8di:
+ case X86::BI__builtin_ia32_scattersiv16si:
+ case X86::BI__builtin_ia32_scatterdiv8di:
+ case X86::BI__builtin_ia32_scatterdiv16si:
+ ArgNum = 4;
+ break;
+ }
+
+ llvm::APSInt Result;
+
+ // We can't check the value of a dependent argument.
+ Expr *Arg = TheCall->getArg(ArgNum);
+ if (Arg->isTypeDependent() || Arg->isValueDependent())
+ return false;
+
+ // Check constant-ness first.
+ if (SemaRef.BuiltinConstantArg(TheCall, ArgNum, Result))
+ return true;
+
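+  // The scale immediate is the byte multiplier applied to each index
+  // (addr = base + index * scale); for example (illustrative), a scale of 8
+  // addresses an array of doubles with _mm512_i32gather_pd.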
+ if (Result == 1 || Result == 2 || Result == 4 || Result == 8)
+ return false;
+
+ return Diag(TheCall->getBeginLoc(), diag::err_x86_builtin_invalid_scale)
+ << Arg->getSourceRange();
+}
+
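+// AMX provides eight tile registers (tmm0..tmm7), so tile-number immediates
+// must fall within [TileRegLow, TileRegHigh].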
+enum { TileRegLow = 0, TileRegHigh = 7 };
+
+bool SemaX86::CheckBuiltinTileArgumentsRange(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+ for (int ArgNum : ArgNums) {
+ if (SemaRef.BuiltinConstantArgRange(TheCall, ArgNum, TileRegLow,
+ TileRegHigh))
+ return true;
+ }
+ return false;
+}
+
+bool SemaX86::CheckBuiltinTileDuplicate(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+  // The maximum number of tile registers is TileRegHigh + 1, so use one bit
+  // per register in the bitset to track which ones have been used.
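+  // For example (illustrative), __builtin_ia32_tdpbssd(1, 2, 2) reuses tile 2
+  // and is diagnosed, while __builtin_ia32_tdpbssd(1, 2, 3) passes this check.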
+ std::bitset<TileRegHigh + 1> ArgValues;
+ for (int ArgNum : ArgNums) {
+ Expr *Arg = TheCall->getArg(ArgNum);
+ if (Arg->isTypeDependent() || Arg->isValueDependent())
+ continue;
+
+ llvm::APSInt Result;
+ if (SemaRef.BuiltinConstantArg(TheCall, ArgNum, Result))
+ return true;
+ int ArgExtValue = Result.getExtValue();
+ assert((ArgExtValue >= TileRegLow && ArgExtValue <= TileRegHigh) &&
+ "Incorrect tile register num.");
+ if (ArgValues.test(ArgExtValue))
+ return Diag(TheCall->getBeginLoc(),
+ diag::err_x86_builtin_tile_arg_duplicate)
+ << TheCall->getArg(ArgNum)->getSourceRange();
+ ArgValues.set(ArgExtValue);
+ }
+ return false;
+}
+
+bool SemaX86::CheckBuiltinTileRangeAndDuplicate(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+ return CheckBuiltinTileArgumentsRange(TheCall, ArgNums) ||
+ CheckBuiltinTileDuplicate(TheCall, ArgNums);
+}
+
+bool SemaX86::CheckBuiltinTileArguments(unsigned BuiltinID, CallExpr *TheCall) {
+ switch (BuiltinID) {
+ default:
+ return false;
+ case X86::BI__builtin_ia32_tileloadd64:
+ case X86::BI__builtin_ia32_tileloaddt164:
+ case X86::BI__builtin_ia32_tilestored64:
+ case X86::BI__builtin_ia32_tilezero:
+ return CheckBuiltinTileArgumentsRange(TheCall, 0);
+ case X86::BI__builtin_ia32_tdpbssd:
+ case X86::BI__builtin_ia32_tdpbsud:
+ case X86::BI__builtin_ia32_tdpbusd:
+ case X86::BI__builtin_ia32_tdpbuud:
+ case X86::BI__builtin_ia32_tdpbf16ps:
+ case X86::BI__builtin_ia32_tdpfp16ps:
+ case X86::BI__builtin_ia32_tcmmimfp16ps:
+ case X86::BI__builtin_ia32_tcmmrlfp16ps:
+ return CheckBuiltinTileRangeAndDuplicate(TheCall, {0, 1, 2});
+ }
+}
+
+static bool isX86_32Builtin(unsigned BuiltinID) {
+ // These builtins only work on x86-32 targets.
+ switch (BuiltinID) {
+ case X86::BI__builtin_ia32_readeflags_u32:
+ case X86::BI__builtin_ia32_writeeflags_u32:
+ return true;
+ }
+
+ return false;
+}
+
+bool SemaX86::CheckBuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
+ // Check for 32-bit only builtins on a 64-bit target.
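+  // For example (illustrative), __builtin_ia32_readeflags_u32 is diagnosed
+  // when targeting x86_64; __builtin_ia32_readeflags_u64 is the 64-bit
+  // counterpart.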
+ const llvm::Triple &TT = TI.getTriple();
+ if (TT.getArch() != llvm::Triple::x86 && isX86_32Builtin(BuiltinID))
+ return Diag(TheCall->getCallee()->getBeginLoc(),
+ diag::err_32_bit_builtin_64_bit_tgt);
+
+  // If the intrinsic has rounding or SAE, make sure it's valid.
+ if (CheckBuiltinRoundingOrSAE(BuiltinID, TheCall))
+ return true;
+
+  // If the intrinsic has a gather/scatter scale immediate, make sure it's
+  // valid.
+ if (CheckBuiltinGatherScatterScale(BuiltinID, TheCall))
+ return true;
+
+  // If the intrinsic has tile arguments, make sure they are valid.
+ if (CheckBuiltinTileArguments(BuiltinID, TheCall))
+ return true;
+
+ // For intrinsics which take an immediate value as part of the instruction,
+ // range check them here.
+ int i = 0, l = 0, u = 0;
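+  // Here i is the index of the immediate argument and [l, u] is its inclusive
+  // range; for example (illustrative), _mm_prefetch checks argument 1 against
+  // [0, 7], so a hint of 3 (_MM_HINT_T0) is in range while 9 is not.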
+ switch (BuiltinID) {
+ default:
+ return false;
+ case X86::BI__builtin_ia32_vec_ext_v2si:
+ case X86::BI__builtin_ia32_vec_ext_v2di:
+ case X86::BI__builtin_ia32_vextractf128_pd256:
+ case X86::BI__builtin_ia32_vextractf128_ps256:
+ case X86::BI__builtin_ia32_vextractf128_si256:
+ case X86::BI__builtin_ia32_extract128i256:
+ case X86::BI__builtin_ia32_extractf64x4_mask:
+ case X86::BI__builtin_ia32_extracti64x4_mask:
+ case X86::BI__builtin_ia32_extractf32x8_mask:
+ case X86::BI__builtin_ia32_extracti32x8_mask:
+ case X86::BI__builtin_ia32_extractf64x2_256_mask:
+ case X86::BI__builtin_ia32_extracti64x2_256_mask:
+ case X86::BI__builtin_ia32_extractf32x4_256_mask:
+ case X86::BI__builtin_ia32_extracti32x4_256_mask:
+ i = 1;
+ l = 0;
+ u = 1;
+ break;
+ case X86::BI__builtin_ia32_vec_set_v2di:
+ case X86::BI__builtin_ia32_vinsertf128_pd256:
+ case X86::BI__builtin_ia32_vinsertf128_ps256:
+ case X86::BI__builtin_ia32_vinsertf128_si256:
+ case X86::BI__builtin_ia32_insert128i256:
+ case X86::BI__builtin_ia32_insertf32x8:
+ case X86::BI__builtin_ia32_inserti32x8:
+ case X86::BI__builtin_ia32_insertf64x4:
+ case X86::BI__builtin_ia32_inserti64x4:
+ case X86::BI__builtin_ia32_insertf64x2_256:
+ case X86::BI__builtin_ia32_inserti64x2_256:
+ case X86::BI__builtin_ia32_insertf32x4_256:
+ case X86::BI__builtin_ia32_inserti32x4_256:
+ i = 2;
+ l = 0;
+ u = 1;
+ break;
+ case X86::BI__builtin_ia32_vpermilpd:
+ case X86::BI__builtin_ia32_vec_ext_v4hi:
+ case X86::BI__builtin_ia32_vec_ext_v4si:
+ case X86::BI__builtin_ia32_vec_ext_v4sf:
+ case X86::BI__builtin_ia32_vec_ext_v4di:
+ case X86::BI__builtin_ia32_extractf32x4_mask:
+ case X86::BI__builtin_ia32_extracti32x4_mask:
+ case X86::BI__builtin_ia32_extractf64x2_512_mask:
+ case X86::BI__builtin_ia32_extracti64x2_512_mask:
+ i = 1;
+ l = 0;
+ u = 3;
+ break;
+ case X86::BI_mm_prefetch:
+ case X86::BI__builtin_ia32_vec_ext_v8hi:
+ case X86::BI__builtin_ia32_vec_ext_v8si:
+ i = 1;
+ l = 0;
+ u = 7;
+ break;
+ case X86::BI__builtin_ia32_sha1rnds4:
+ case X86::BI__builtin_ia32_blendpd:
+ case X86::BI__builtin_ia32_shufpd:
+ case X86::BI__builtin_ia32_vec_set_v4hi:
+ case X86::BI__builtin_ia32_vec_set_v4si:
+ case X86::BI__builtin_ia32_vec_set_v4di:
+ case X86::BI__builtin_ia32_shuf_f32x4_256:
+ case X86::BI__builtin_ia32_shuf_f64x2_256:
+ case X86::BI__builtin_ia32_shuf_i32x4_256:
+ case X86::BI__builtin_ia32_shuf_i64x2_256:
+ case X86::BI__builtin_ia32_insertf64x2_512:
+ case X86::BI__builtin_ia32_inserti64x2_512:
+ case X86::BI__builtin_ia32_insertf32x4:
+ case X86::BI__builtin_ia32_inserti32x4:
+ i = 2;
+ l = 0;
+ u = 3;
+ break;
+ case X86::BI__builtin_ia32_vpermil2pd:
+ case X86::BI__builtin_ia32_vpermil2pd256:
+ case X86::BI__builtin_ia32_vpermil2ps:
+ case X86::BI__builtin_ia32_vpermil2ps256:
+ i = 3;
+ l = 0;
+ u = 3;
+ break;
+ case X86::BI__builtin_ia32_cmpb128_mask:
+ case X86::BI__builtin_ia32_cmpw128_mask:
+ case X86::BI__builtin_ia32_cmpd128_mask:
+ case X86::BI__builtin_ia32_cmpq128_mask:
+ case X86::BI__builtin_ia32_cmpb256_mask:
+ case X86::BI__builtin_ia32_cmpw256_mask:
+ case X86::BI__builtin_ia32_cmpd256_mask:
+ case X86::BI__builtin_ia32_cmpq256_mask:
+ case X86::BI__builtin_ia32_cmpb512_mask:
+ case X86::BI__builtin_ia32_cmpw512_mask:
+ case X86::BI__builtin_ia32_cmpd512_mask:
+ case X86::BI__builtin_ia32_cmpq512_mask:
+ case X86::BI__builtin_ia32_ucmpb128_mask:
+ case X86::BI__builtin_ia32_ucmpw128_mask:
+ case X86::BI__builtin_ia32_ucmpd128_mask:
+ case X86::BI__builtin_ia32_ucmpq128_mask:
+ case X86::BI__builtin_ia32_ucmpb256_mask:
+ case X86::BI__builtin_ia32_ucmpw256_mask:
+ case X86::BI__builtin_ia32_ucmpd256_mask:
+ case X86::BI__builtin_ia32_ucmpq256_mask:
+ case X86::BI__builtin_ia32_ucmpb512_mask:
+ case X86::BI__builtin_ia32_ucmpw512_mask:
+ case X86::BI__builtin_ia32_ucmpd512_mask:
+ case X86::BI__builtin_ia32_ucmpq512_mask:
+ case X86::BI__builtin_ia32_vpcomub:
+ case X86::BI__builtin_ia32_vpcomuw:
+ case X86::BI__builtin_ia32_vpcomud:
+ case X86::BI__builtin_ia32_vpcomuq:
+ case X86::BI__builtin_ia32_vpcomb:
+ case X86::BI__builtin_ia32_vpcomw:
+ case X86::BI__builtin_ia32_vpcomd:
+ case X86::BI__builtin_ia32_vpcomq:
+ case X86::BI__builtin_ia32_vec_set_v8hi:
+ case X86::BI__builtin_ia32_vec_set_v8si:
+ i = 2;
+ l = 0;
+ u = 7;
+ break;
+ case X86::BI__builtin_ia32_vpermilpd256:
+ case X86::BI__builtin_ia32_roundps:
+ case X86::BI__builtin_ia32_roundpd:
+ case X86::BI__builtin_ia32_roundps256:
+ case X86::BI__builtin_ia32_roundpd256:
+ case X86::BI__builtin_ia32_getmantpd128_mask:
+ case X86::BI__builtin_ia32_getmantpd256_mask:
+ case X86::BI__builtin_ia32_getmantps128_mask:
+ case X86::BI__builtin_ia32_getmantps256_mask:
+ case X86::BI__builtin_ia32_getmantpd512_mask:
+ case X86::BI__builtin_ia32_getmantps512_mask:
+ case X86::BI__builtin_ia32_getmantph128_mask:
+ case X86::BI__builtin_ia32_getmantph256_mask:
+ case X86::BI__builtin_ia32_getmantph512_mask:
+ case X86::BI__builtin_ia32_vec_ext_v16qi:
+ case X86::BI__builtin_ia32_vec_ext_v16hi:
+ i = 1;
+ l = 0;
+ u = 15;
+ break;
+ case X86::BI__builtin_ia32_pblendd128:
+ case X86::BI__builtin_ia32_blendps:
+ case X86::BI__builtin_ia32_blendpd256:
+ case X86::BI__builtin_ia32_shufpd256:
+ case X86::BI__builtin_ia32_roundss:
+ case X86::BI__builtin_ia32_roundsd:
+ case X86::BI__builtin_ia32_rangepd128_mask:
+ case X86::BI__builtin_ia32_rangepd256_mask:
+ case X86::BI__builtin_ia32_rangepd512_mask:
+ case X86::BI__builtin_ia32_rangeps128_mask:
+ case X86::BI__builtin_ia32_rangeps256_mask:
+ case X86::BI__builtin_ia32_rangeps512_mask:
+ case X86::BI__builtin_ia32_getmantsd_round_mask:
+ case X86::BI__builtin_ia32_getmantss_round_mask:
+ case X86::BI__builtin_ia32_getmantsh_round_mask:
+ case X86::BI__builtin_ia32_vec_set_v16qi:
+ case X86::BI__builtin_ia32_vec_set_v16hi:
+ i = 2;
+ l = 0;
+ u = 15;
+ break;
+ case X86::BI__builtin_ia32_vec_ext_v32qi:
+ i = 1;
+ l = 0;
+ u = 31;
+ break;
+ case X86::BI__builtin_ia32_cmpps:
+ case X86::BI__builtin_ia32_cmpss:
+ case X86::BI__builtin_ia32_cmppd:
+ case X86::BI__builtin_ia32_cmpsd:
+ case X86::BI__builtin_ia32_cmpps256:
+ case X86::BI__builtin_ia32_cmppd256:
+ case X86::BI__builtin_ia32_cmpps128_mask:
+ case X86::BI__builtin_ia32_cmppd128_mask:
+ case X86::BI__builtin_ia32_cmpps256_mask:
+ case X86::BI__builtin_ia32_cmppd256_mask:
+ case X86::BI__builtin_ia32_cmpps512_mask:
+ case X86::BI__builtin_ia32_cmppd512_mask:
+ case X86::BI__builtin_ia32_cmpsd_mask:
+ case X86::BI__builtin_ia32_cmpss_mask:
+ case X86::BI__builtin_ia32_vec_set_v32qi:
+ i = 2;
+ l = 0;
+ u = 31;
+ break;
+ case X86::BI__builtin_ia32_permdf256:
+ case X86::BI__builtin_ia32_permdi256:
+ case X86::BI__builtin_ia32_permdf512:
+ case X86::BI__builtin_ia32_permdi512:
+ case X86::BI__builtin_ia32_vpermilps:
+ case X86::BI__builtin_ia32_vpermilps256:
+ case X86::BI__builtin_ia32_vpermilpd512:
+ case X86::BI__builtin_ia32_vpermilps512:
+ case X86::BI__builtin_ia32_pshufd:
+ case X86::BI__builtin_ia32_pshufd256:
+ case X86::BI__builtin_ia32_pshufd512:
+ case X86::BI__builtin_ia32_pshufhw:
+ case X86::BI__builtin_ia32_pshufhw256:
+ case X86::BI__builtin_ia32_pshufhw512:
+ case X86::BI__builtin_ia32_pshuflw:
+ case X86::BI__builtin_ia32_pshuflw256:
+ case X86::BI__builtin_ia32_pshuflw512:
+ case X86::BI__builtin_ia32_vcvtps2ph:
+ case X86::BI__builtin_ia32_vcvtps2ph_mask:
+ case X86::BI__builtin_ia32_vcvtps2ph256:
+ case X86::BI__builtin_ia32_vcvtps2ph256_mask:
+ case X86::BI__builtin_ia32_vcvtps2ph512_mask:
+ case X86::BI__builtin_ia32_rndscaleps_128_mask:
+ case X86::BI__builtin_ia32_rndscalepd_128_mask:
+ case X86::BI__builtin_ia32_rndscaleps_256_mask:
+ case X86::BI__builtin_ia32_rndscalepd_256_mask:
+ case X86::BI__builtin_ia32_rndscaleps_mask:
+ case X86::BI__builtin_ia32_rndscalepd_mask:
+ case X86::BI__builtin_ia32_rndscaleph_mask:
+ case X86::BI__builtin_ia32_reducepd128_mask:
+ case X86::BI__builtin_ia32_reducepd256_mask:
+ case X86::BI__builtin_ia32_reducepd512_mask:
+ case X86::BI__builtin_ia32_reduceps128_mask:
+ case X86::BI__builtin_ia32_reduceps256_mask:
+ case X86::BI__builtin_ia32_reduceps512_mask:
+ case X86::BI__builtin_ia32_reduceph128_mask:
+ case X86::BI__builtin_ia32_reduceph256_mask:
+ case X86::BI__builtin_ia32_reduceph512_mask:
+ case X86::BI__builtin_ia32_prold512:
+ case X86::BI__builtin_ia32_prolq512:
+ case X86::BI__builtin_ia32_prold128:
+ case X86::BI__builtin_ia32_prold256:
+ case X86::BI__builtin_ia32_prolq128:
+ case X86::BI__builtin_ia32_prolq256:
+ case X86::BI__builtin_ia32_prord512:
+ case X86::BI__builtin_ia32_prorq512:
+ case X86::BI__builtin_ia32_prord128:
+ case X86::BI__builtin_ia32_prord256:
+ case X86::BI__builtin_ia32_prorq128:
+ case X86::BI__builtin_ia32_prorq256:
+ case X86::BI__builtin_ia32_fpclasspd128_mask:
+ case X86::BI__builtin_ia32_fpclasspd256_mask:
+ case X86::BI__builtin_ia32_fpclassps128_mask:
+ case X86::BI__builtin_ia32_fpclassps256_mask:
+ case X86::BI__builtin_ia32_fpclassps512_mask:
+ case X86::BI__builtin_ia32_fpclasspd512_mask:
+ case X86::BI__builtin_ia32_fpclassph128_mask:
+ case X86::BI__builtin_ia32_fpclassph256_mask:
+ case X86::BI__builtin_ia32_fpclassph512_mask:
+ case X86::BI__builtin_ia32_fpclasssd_mask:
+ case X86::BI__builtin_ia32_fpclassss_mask:
+ case X86::BI__builtin_ia32_fpclasssh_mask:
+ case X86::BI__builtin_ia32_pslldqi128_byteshift:
+ case X86::BI__builtin_ia32_pslldqi256_byteshift:
+ case X86::BI__builtin_ia32_pslldqi512_byteshift:
+ case X86::BI__builtin_ia32_psrldqi128_byteshift:
+ case X86::BI__builtin_ia32_psrldqi256_byteshift:
+ case X86::BI__builtin_ia32_psrldqi512_byteshift:
+ case X86::BI__builtin_ia32_kshiftliqi:
+ case X86::BI__builtin_ia32_kshiftlihi:
+ case X86::BI__builtin_ia32_kshiftlisi:
+ case X86::BI__builtin_ia32_kshiftlidi:
+ case X86::BI__builtin_ia32_kshiftriqi:
+ case X86::BI__builtin_ia32_kshiftrihi:
+ case X86::BI__builtin_ia32_kshiftrisi:
+ case X86::BI__builtin_ia32_kshiftridi:
+ i = 1;
+ l = 0;
+ u = 255;
+ break;
+ case X86::BI__builtin_ia32_vperm2f128_pd256:
+ case X86::BI__builtin_ia32_vperm2f128_ps256:
+ case X86::BI__builtin_ia32_vperm2f128_si256:
+ case X86::BI__builtin_ia32_permti256:
+ case X86::BI__builtin_ia32_pblendw128:
+ case X86::BI__builtin_ia32_pblendw256:
+ case X86::BI__builtin_ia32_blendps256:
+ case X86::BI__builtin_ia32_pblendd256:
+ case X86::BI__builtin_ia32_palignr128:
+ case X86::BI__builtin_ia32_palignr256:
+ case X86::BI__builtin_ia32_palignr512:
+ case X86::BI__builtin_ia32_alignq512:
+ case X86::BI__builtin_ia32_alignd512:
+ case X86::BI__builtin_ia32_alignd128:
+ case X86::BI__builtin_ia32_alignd256:
+ case X86::BI__builtin_ia32_alignq128:
+ case X86::BI__builtin_ia32_alignq256:
+ case X86::BI__builtin_ia32_vcomisd:
+ case X86::BI__builtin_ia32_vcomiss:
+ case X86::BI__builtin_ia32_shuf_f32x4:
+ case X86::BI__builtin_ia32_shuf_f64x2:
+ case X86::BI__builtin_ia32_shuf_i32x4:
+ case X86::BI__builtin_ia32_shuf_i64x2:
+ case X86::BI__builtin_ia32_shufpd512:
+ case X86::BI__builtin_ia32_shufps:
+ case X86::BI__builtin_ia32_shufps256:
+ case X86::BI__builtin_ia32_shufps512:
+ case X86::BI__builtin_ia32_dbpsadbw128:
+ case X86::BI__builtin_ia32_dbpsadbw256:
+ case X86::BI__builtin_ia32_dbpsadbw512:
+ case X86::BI__builtin_ia32_vpshldd128:
+ case X86::BI__builtin_ia32_vpshldd256:
+ case X86::BI__builtin_ia32_vpshldd512:
+ case X86::BI__builtin_ia32_vpshldq128:
+ case X86::BI__builtin_ia32_vpshldq256:
+ case X86::BI__builtin_ia32_vpshldq512:
+ case X86::BI__builtin_ia32_vpshldw128:
+ case X86::BI__builtin_ia32_vpshldw256:
+ case X86::BI__builtin_ia32_vpshldw512:
+ case X86::BI__builtin_ia32_vpshrdd128:
+ case X86::BI__builtin_ia32_vpshrdd256:
+ case X86::BI__builtin_ia32_vpshrdd512:
+ case X86::BI__builtin_ia32_vpshrdq128:
+ case X86::BI__builtin_ia32_vpshrdq256:
+ case X86::BI__builtin_ia32_vpshrdq512:
+ case X86::BI__builtin_ia32_vpshrdw128:
+ case X86::BI__builtin_ia32_vpshrdw256:
+ case X86::BI__builtin_ia32_vpshrdw512:
+ i = 2;
+ l = 0;
+ u = 255;
+ break;
+ case X86::BI__builtin_ia32_fixupimmpd512_mask:
+ case X86::BI__builtin_ia32_fixupimmpd512_maskz:
+ case X86::BI__builtin_ia32_fixupimmps512_mask:
+ case X86::BI__builtin_ia32_fixupimmps512_maskz:
+ case X86::BI__builtin_ia32_fixupimmsd_mask:
+ case X86::BI__builtin_ia32_fixupimmsd_maskz:
+ case X86::BI__builtin_ia32_fixupimmss_mask:
+ case X86::BI__builtin_ia32_fixupimmss_maskz:
+ case X86::BI__builtin_ia32_fixupimmpd128_mask:
+ case X86::BI__builtin_ia32_fixupimmpd128_maskz:
+ case X86::BI__builtin_ia32_fixupimmpd256_mask:
+ case X86::BI__builtin_ia32_fixupimmpd256_maskz:
+ case X86::BI__builtin_ia32_fixupimmps128_mask:
+ case X86::BI__builtin_ia32_fixupimmps128_maskz:
+ case X86::BI__builtin_ia32_fixupimmps256_mask:
+ case X86::BI__builtin_ia32_fixupimmps256_maskz:
+ case X86::BI__builtin_ia32_pternlogd512_mask:
+ case X86::BI__builtin_ia32_pternlogd512_maskz:
+ case X86::BI__builtin_ia32_pternlogq512_mask:
+ case X86::BI__builtin_ia32_pternlogq512_maskz:
+ case X86::BI__builtin_ia32_pternlogd128_mask:
+ case X86::BI__builtin_ia32_pternlogd128_maskz:
+ case X86::BI__builtin_ia32_pternlogd256_mask:
+ case X86::BI__builtin_ia32_pternlogd256_maskz:
+ case X86::BI__builtin_ia32_pternlogq128_mask:
+ case X86::BI__builtin_ia32_pternlogq128_maskz:
+ case X86::BI__builtin_ia32_pternlogq256_mask:
+ case X86::BI__builtin_ia32_pternlogq256_maskz:
+ case X86::BI__builtin_ia32_vsm3rnds2:
+ i = 3;
+ l = 0;
+ u = 255;
+ break;
+ case X86::BI__builtin_ia32_reducesd_mask:
+ case X86::BI__builtin_ia32_reducess_mask:
+ case X86::BI__builtin_ia32_rndscalesd_round_mask:
+ case X86::BI__builtin_ia32_rndscaless_round_mask:
+ case X86::BI__builtin_ia32_rndscalesh_round_mask:
+ case X86::BI__builtin_ia32_reducesh_mask:
+ i = 4;
+ l = 0;
+ u = 255;
+ break;
+ case X86::BI__builtin_ia32_cmpccxadd32:
+ case X86::BI__builtin_ia32_cmpccxadd64:
+ i = 3;
+ l = 0;
+ u = 15;
+ break;
+ }
+
+  // Note that we don't force a hard error on the range check here, allowing
+  // template-generated or macro-generated dead code to potentially have
+  // out-of-range values. Such code still needs to go through code generation,
+  // but it doesn't necessarily need to make sense. We use a warning that
+  // defaults to an error.
+ return SemaRef.BuiltinConstantArgRange(TheCall, i, l, u,
+ /*RangeIsError*/ false);
+}
+
+void SemaX86::handleAnyInterruptAttr(Decl *D, const ParsedAttr &AL) {
+ // Semantic checks for a function with the 'interrupt' attribute.
+ // a) Must be a function.
+ // b) Must have the 'void' return type.
+ // c) Must take 1 or 2 arguments.
+ // d) The 1st argument must be a pointer.
+ // e) The 2nd argument (if any) must be an unsigned integer.
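+  // A conforming handler might look like (illustrative sketch, assuming an
+  // x86-64 target where the error-code parameter must be 64 bits wide):
+  //   struct frame;
+  //   __attribute__((interrupt)) void isr(struct frame *f, uint64_t code);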
+ ASTContext &Context = getASTContext();
+
+ if (!isFuncOrMethodForAttrSubject(D) || !hasFunctionProto(D) ||
+ isInstanceMethod(D) ||
+ CXXMethodDecl::isStaticOverloadedOperator(
+ cast<NamedDecl>(D)->getDeclName().getCXXOverloadedOperator())) {
+ Diag(AL.getLoc(), diag::warn_attribute_wrong_decl_type)
+ << AL << AL.isRegularKeywordAttribute()
+ << ExpectedFunctionWithProtoType;
+ return;
+ }
+ // Interrupt handler must have void return type.
+ if (!getFunctionOrMethodResultType(D)->isVoidType()) {
+ Diag(getFunctionOrMethodResultSourceRange(D).getBegin(),
+ diag::err_anyx86_interrupt_attribute)
+ << (SemaRef.Context.getTargetInfo().getTriple().getArch() ==
+ llvm::Triple::x86
+ ? 0
+ : 1)
+ << 0;
+ return;
+ }
+ // Interrupt handler must have 1 or 2 parameters.
+ unsigned NumParams = getFunctionOrMethodNumParams(D);
+ if (NumParams < 1 || NumParams > 2) {
+ Diag(D->getBeginLoc(), diag::err_anyx86_interrupt_attribute)
+ << (Context.getTargetInfo().getTriple().getArch() == llvm::Triple::x86
+ ? 0
+ : 1)
+ << 1;
+ return;
+ }
+ // The first argument must be a pointer.
+ if (!getFunctionOrMethodParamType(D, 0)->isPointerType()) {
+ Diag(getFunctionOrMethodParamRange(D, 0).getBegin(),
+ diag::err_anyx86_interrupt_attribute)
+ << (Context.getTargetInfo().getTriple().getArch() == llvm::Triple::x86
+ ? 0
+ : 1)
+ << 2;
+ return;
+ }
+ // The second argument, if present, must be an unsigned integer.
+ unsigned TypeSize =
+ Context.getTargetInfo().getTriple().getArch() == llvm::Triple::x86_64
+ ? 64
+ : 32;
+ if (NumParams == 2 &&
+ (!getFunctionOrMethodParamType(D, 1)->isUnsignedIntegerType() ||
+ Context.getTypeSize(getFunctionOrMethodParamType(D, 1)) != TypeSize)) {
+ Diag(getFunctionOrMethodParamRange(D, 1).getBegin(),
+ diag::err_anyx86_interrupt_attribute)
+ << (Context.getTargetInfo().getTriple().getArch() == llvm::Triple::x86
+ ? 0
+ : 1)
+ << 3 << Context.getIntTypeForBitwidth(TypeSize, /*Signed=*/false);
+ return;
+ }
+ D->addAttr(::new (Context) AnyX86InterruptAttr(Context, AL));
+ D->addAttr(UsedAttr::CreateImplicit(Context));
+}
+
+void SemaX86::handleForceAlignArgPointerAttr(Decl *D, const ParsedAttr &AL) {
+ // If we try to apply it to a function pointer, don't warn, but don't
+ // do anything, either. It doesn't matter anyway, because there's nothing
+ // special about calling a force_align_arg_pointer function.
+ const auto *VD = dyn_cast<ValueDecl>(D);
+ if (VD && VD->getType()->isFunctionPointerType())
+ return;
+ // Also don't warn on function pointer typedefs.
+ const auto *TD = dyn_cast<TypedefNameDecl>(D);
+ if (TD && (TD->getUnderlyingType()->isFunctionPointerType() ||
+ TD->getUnderlyingType()->isFunctionType()))
+ return;
+ // Attribute can only be applied to function types.
+ if (!isa<FunctionDecl>(D)) {
+ Diag(AL.getLoc(), diag::warn_attribute_wrong_decl_type)
+ << AL << AL.isRegularKeywordAttribute() << ExpectedFunction;
+ return;
+ }
+
+ D->addAttr(::new (getASTContext())
+ X86ForceAlignArgPointerAttr(getASTContext(), AL));
+}
+
+} // namespace clang