summaryrefslogtreecommitdiff
path: root/clang/lib/Sema/SemaChecking.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'clang/lib/Sema/SemaChecking.cpp')
-rw-r--r--clang/lib/Sema/SemaChecking.cpp2536
1 files changed, 1538 insertions, 998 deletions
diff --git a/clang/lib/Sema/SemaChecking.cpp b/clang/lib/Sema/SemaChecking.cpp
index 74742023d1b3..509d88e25000 100644
--- a/clang/lib/Sema/SemaChecking.cpp
+++ b/clang/lib/Sema/SemaChecking.cpp
@@ -30,6 +30,7 @@
#include "clang/AST/NSAPI.h"
#include "clang/AST/NonTrivialTypeVisitor.h"
#include "clang/AST/OperationKinds.h"
+#include "clang/AST/RecordLayout.h"
#include "clang/AST/Stmt.h"
#include "clang/AST/TemplateBase.h"
#include "clang/AST/Type.h"
@@ -87,6 +88,7 @@
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
+#include <bitset>
#include <cassert>
#include <cstddef>
#include <cstdint>
@@ -236,8 +238,8 @@ static bool SemaBuiltinAlignment(Sema &S, CallExpr *TheCall, unsigned ID) {
}
Expr::EvalResult AlignResult;
unsigned MaxAlignmentBits = S.Context.getIntWidth(SrcTy) - 1;
- // We can't check validity of alignment if it is type dependent.
- if (!AlignOp->isInstantiationDependent() &&
+ // We can't check validity of alignment if it is value dependent.
+ if (!AlignOp->isValueDependent() &&
AlignOp->EvaluateAsInt(AlignResult, S.Context,
Expr::SE_AllowSideEffects)) {
llvm::APSInt AlignValue = AlignResult.Val.getInt();
@@ -282,48 +284,60 @@ static bool SemaBuiltinAlignment(Sema &S, CallExpr *TheCall, unsigned ID) {
return false;
}
-static bool SemaBuiltinOverflow(Sema &S, CallExpr *TheCall) {
+static bool SemaBuiltinOverflow(Sema &S, CallExpr *TheCall,
+ unsigned BuiltinID) {
if (checkArgCount(S, TheCall, 3))
return true;
// First two arguments should be integers.
for (unsigned I = 0; I < 2; ++I) {
- ExprResult Arg = TheCall->getArg(I);
+ ExprResult Arg = S.DefaultFunctionArrayLvalueConversion(TheCall->getArg(I));
+ if (Arg.isInvalid()) return true;
+ TheCall->setArg(I, Arg.get());
+
QualType Ty = Arg.get()->getType();
if (!Ty->isIntegerType()) {
S.Diag(Arg.get()->getBeginLoc(), diag::err_overflow_builtin_must_be_int)
<< Ty << Arg.get()->getSourceRange();
return true;
}
- InitializedEntity Entity = InitializedEntity::InitializeParameter(
- S.getASTContext(), Ty, /*consume*/ false);
- Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
- if (Arg.isInvalid())
- return true;
- TheCall->setArg(I, Arg.get());
}
// Third argument should be a pointer to a non-const integer.
// IRGen correctly handles volatile, restrict, and address spaces, and
// the other qualifiers aren't possible.
{
- ExprResult Arg = TheCall->getArg(2);
+ ExprResult Arg = S.DefaultFunctionArrayLvalueConversion(TheCall->getArg(2));
+ if (Arg.isInvalid()) return true;
+ TheCall->setArg(2, Arg.get());
+
QualType Ty = Arg.get()->getType();
const auto *PtrTy = Ty->getAs<PointerType>();
- if (!(PtrTy && PtrTy->getPointeeType()->isIntegerType() &&
- !PtrTy->getPointeeType().isConstQualified())) {
+ if (!PtrTy ||
+ !PtrTy->getPointeeType()->isIntegerType() ||
+ PtrTy->getPointeeType().isConstQualified()) {
S.Diag(Arg.get()->getBeginLoc(),
diag::err_overflow_builtin_must_be_ptr_int)
- << Ty << Arg.get()->getSourceRange();
+ << Ty << Arg.get()->getSourceRange();
return true;
}
- InitializedEntity Entity = InitializedEntity::InitializeParameter(
- S.getASTContext(), Ty, /*consume*/ false);
- Arg = S.PerformCopyInitialization(Entity, SourceLocation(), Arg);
- if (Arg.isInvalid())
- return true;
- TheCall->setArg(2, Arg.get());
}
+
+ // Disallow signed ExtIntType args larger than 128 bits to mul function until
+ // we improve backend support.
+ if (BuiltinID == Builtin::BI__builtin_mul_overflow) {
+ for (unsigned I = 0; I < 3; ++I) {
+ const auto Arg = TheCall->getArg(I);
+ // Third argument will be a pointer.
+ auto Ty = I < 2 ? Arg->getType() : Arg->getType()->getPointeeType();
+ if (Ty->isExtIntType() && Ty->isSignedIntegerType() &&
+ S.getASTContext().getIntWidth(Ty) > 128)
+ return S.Diag(Arg->getBeginLoc(),
+ diag::err_overflow_builtin_ext_int_max_size)
+ << 128;
+ }
+ }
+
return false;
}
@@ -390,13 +404,194 @@ static bool SemaBuiltinCallWithStaticChain(Sema &S, CallExpr *BuiltinCall) {
return false;
}
+namespace {
+
+class EstimateSizeFormatHandler
+ : public analyze_format_string::FormatStringHandler {
+ size_t Size;
+
+public:
+ EstimateSizeFormatHandler(StringRef Format)
+ : Size(std::min(Format.find(0), Format.size()) +
+ 1 /* null byte always written by sprintf */) {}
+
+ bool HandlePrintfSpecifier(const analyze_printf::PrintfSpecifier &FS,
+ const char *, unsigned SpecifierLen) override {
+
+ const size_t FieldWidth = computeFieldWidth(FS);
+ const size_t Precision = computePrecision(FS);
+
+ // The actual format.
+ switch (FS.getConversionSpecifier().getKind()) {
+ // Just a char.
+ case analyze_format_string::ConversionSpecifier::cArg:
+ case analyze_format_string::ConversionSpecifier::CArg:
+ Size += std::max(FieldWidth, (size_t)1);
+ break;
+ // Just an integer.
+ case analyze_format_string::ConversionSpecifier::dArg:
+ case analyze_format_string::ConversionSpecifier::DArg:
+ case analyze_format_string::ConversionSpecifier::iArg:
+ case analyze_format_string::ConversionSpecifier::oArg:
+ case analyze_format_string::ConversionSpecifier::OArg:
+ case analyze_format_string::ConversionSpecifier::uArg:
+ case analyze_format_string::ConversionSpecifier::UArg:
+ case analyze_format_string::ConversionSpecifier::xArg:
+ case analyze_format_string::ConversionSpecifier::XArg:
+ Size += std::max(FieldWidth, Precision);
+ break;
+
+ // %g style conversion switches between %f or %e style dynamically.
+ // %f always takes less space, so default to it.
+ case analyze_format_string::ConversionSpecifier::gArg:
+ case analyze_format_string::ConversionSpecifier::GArg:
+
+ // Floating point number in the form '[+]ddd.ddd'.
+ case analyze_format_string::ConversionSpecifier::fArg:
+ case analyze_format_string::ConversionSpecifier::FArg:
+ Size += std::max(FieldWidth, 1 /* integer part */ +
+ (Precision ? 1 + Precision
+ : 0) /* period + decimal */);
+ break;
+
+ // Floating point number in the form '[-]d.ddde[+-]dd'.
+ case analyze_format_string::ConversionSpecifier::eArg:
+ case analyze_format_string::ConversionSpecifier::EArg:
+ Size +=
+ std::max(FieldWidth,
+ 1 /* integer part */ +
+ (Precision ? 1 + Precision : 0) /* period + decimal */ +
+ 1 /* e or E letter */ + 2 /* exponent */);
+ break;
+
+ // Floating point number in the form '[-]0xh.hhhhp±dd'.
+ case analyze_format_string::ConversionSpecifier::aArg:
+ case analyze_format_string::ConversionSpecifier::AArg:
+ Size +=
+ std::max(FieldWidth,
+ 2 /* 0x */ + 1 /* integer part */ +
+ (Precision ? 1 + Precision : 0) /* period + decimal */ +
+ 1 /* p or P letter */ + 1 /* + or - */ + 1 /* value */);
+ break;
+
+ // Just a string.
+ case analyze_format_string::ConversionSpecifier::sArg:
+ case analyze_format_string::ConversionSpecifier::SArg:
+ Size += FieldWidth;
+ break;
+
+ // Just a pointer in the form '0xddd'.
+ case analyze_format_string::ConversionSpecifier::pArg:
+ Size += std::max(FieldWidth, 2 /* leading 0x */ + Precision);
+ break;
+
+ // A plain percent.
+ case analyze_format_string::ConversionSpecifier::PercentArg:
+ Size += 1;
+ break;
+
+ default:
+ break;
+ }
+
+ Size += FS.hasPlusPrefix() || FS.hasSpacePrefix();
+
+ if (FS.hasAlternativeForm()) {
+ switch (FS.getConversionSpecifier().getKind()) {
+ default:
+ break;
+ // Force a leading '0'.
+ case analyze_format_string::ConversionSpecifier::oArg:
+ Size += 1;
+ break;
+ // Force a leading '0x'.
+ case analyze_format_string::ConversionSpecifier::xArg:
+ case analyze_format_string::ConversionSpecifier::XArg:
+ Size += 2;
+ break;
+ // Force a period '.' before decimal, even if precision is 0.
+ case analyze_format_string::ConversionSpecifier::aArg:
+ case analyze_format_string::ConversionSpecifier::AArg:
+ case analyze_format_string::ConversionSpecifier::eArg:
+ case analyze_format_string::ConversionSpecifier::EArg:
+ case analyze_format_string::ConversionSpecifier::fArg:
+ case analyze_format_string::ConversionSpecifier::FArg:
+ case analyze_format_string::ConversionSpecifier::gArg:
+ case analyze_format_string::ConversionSpecifier::GArg:
+ Size += (Precision ? 0 : 1);
+ break;
+ }
+ }
+ assert(SpecifierLen <= Size && "no underflow");
+ Size -= SpecifierLen;
+ return true;
+ }
+
+ size_t getSizeLowerBound() const { return Size; }
+
+private:
+ static size_t computeFieldWidth(const analyze_printf::PrintfSpecifier &FS) {
+ const analyze_format_string::OptionalAmount &FW = FS.getFieldWidth();
+ size_t FieldWidth = 0;
+ if (FW.getHowSpecified() == analyze_format_string::OptionalAmount::Constant)
+ FieldWidth = FW.getConstantAmount();
+ return FieldWidth;
+ }
+
+ static size_t computePrecision(const analyze_printf::PrintfSpecifier &FS) {
+ const analyze_format_string::OptionalAmount &FW = FS.getPrecision();
+ size_t Precision = 0;
+
+ // See man 3 printf for default precision value based on the specifier.
+ switch (FW.getHowSpecified()) {
+ case analyze_format_string::OptionalAmount::NotSpecified:
+ switch (FS.getConversionSpecifier().getKind()) {
+ default:
+ break;
+ case analyze_format_string::ConversionSpecifier::dArg: // %d
+ case analyze_format_string::ConversionSpecifier::DArg: // %D
+ case analyze_format_string::ConversionSpecifier::iArg: // %i
+ Precision = 1;
+ break;
+      case analyze_format_string::ConversionSpecifier::oArg: // %o
+      case analyze_format_string::ConversionSpecifier::OArg: // %O
+      case analyze_format_string::ConversionSpecifier::uArg: // %u
+      case analyze_format_string::ConversionSpecifier::UArg: // %U
+      case analyze_format_string::ConversionSpecifier::xArg: // %x
+      case analyze_format_string::ConversionSpecifier::XArg: // %X
+ Precision = 1;
+ break;
+ case analyze_format_string::ConversionSpecifier::fArg: // %f
+ case analyze_format_string::ConversionSpecifier::FArg: // %F
+ case analyze_format_string::ConversionSpecifier::eArg: // %e
+ case analyze_format_string::ConversionSpecifier::EArg: // %E
+ case analyze_format_string::ConversionSpecifier::gArg: // %g
+ case analyze_format_string::ConversionSpecifier::GArg: // %G
+ Precision = 6;
+ break;
+      case analyze_format_string::ConversionSpecifier::pArg: // %p
+ Precision = 1;
+ break;
+ }
+ break;
+ case analyze_format_string::OptionalAmount::Constant:
+ Precision = FW.getConstantAmount();
+ break;
+ default:
+ break;
+ }
+ return Precision;
+ }
+};
+
+} // namespace
+
/// Check a call to BuiltinID for buffer overflows. If BuiltinID is a
/// __builtin_*_chk function, then use the object size argument specified in the
/// source. Otherwise, infer the object size using __builtin_object_size.
void Sema::checkFortifiedBuiltinMemoryFunction(FunctionDecl *FD,
CallExpr *TheCall) {
// FIXME: There are some more useful checks we could be doing here:
- // - Analyze the format string of sprintf to see how much of buffer is used.
// - Evaluate strlen of strcpy arguments, use as object size.
if (TheCall->isValueDependent() || TheCall->isTypeDependent() ||
@@ -407,12 +602,55 @@ void Sema::checkFortifiedBuiltinMemoryFunction(FunctionDecl *FD,
if (!BuiltinID)
return;
+ const TargetInfo &TI = getASTContext().getTargetInfo();
+ unsigned SizeTypeWidth = TI.getTypeWidth(TI.getSizeType());
+
unsigned DiagID = 0;
bool IsChkVariant = false;
+ Optional<llvm::APSInt> UsedSize;
unsigned SizeIndex, ObjectIndex;
switch (BuiltinID) {
default:
return;
+ case Builtin::BIsprintf:
+ case Builtin::BI__builtin___sprintf_chk: {
+ size_t FormatIndex = BuiltinID == Builtin::BIsprintf ? 1 : 3;
+ auto *FormatExpr = TheCall->getArg(FormatIndex)->IgnoreParenImpCasts();
+
+ if (auto *Format = dyn_cast<StringLiteral>(FormatExpr)) {
+
+ if (!Format->isAscii() && !Format->isUTF8())
+ return;
+
+ StringRef FormatStrRef = Format->getString();
+ EstimateSizeFormatHandler H(FormatStrRef);
+ const char *FormatBytes = FormatStrRef.data();
+ const ConstantArrayType *T =
+ Context.getAsConstantArrayType(Format->getType());
+ assert(T && "String literal not of constant array type!");
+ size_t TypeSize = T->getSize().getZExtValue();
+
+ // In case there's a null byte somewhere.
+ size_t StrLen =
+ std::min(std::max(TypeSize, size_t(1)) - 1, FormatStrRef.find(0));
+ if (!analyze_format_string::ParsePrintfString(
+ H, FormatBytes, FormatBytes + StrLen, getLangOpts(),
+ Context.getTargetInfo(), false)) {
+ DiagID = diag::warn_fortify_source_format_overflow;
+ UsedSize = llvm::APSInt::getUnsigned(H.getSizeLowerBound())
+ .extOrTrunc(SizeTypeWidth);
+ if (BuiltinID == Builtin::BI__builtin___sprintf_chk) {
+ IsChkVariant = true;
+ ObjectIndex = 2;
+ } else {
+ IsChkVariant = false;
+ ObjectIndex = 0;
+ }
+ break;
+ }
+ }
+ return;
+ }
case Builtin::BI__builtin___memcpy_chk:
case Builtin::BI__builtin___memmove_chk:
case Builtin::BI__builtin___memset_chk:
@@ -505,19 +743,19 @@ void Sema::checkFortifiedBuiltinMemoryFunction(FunctionDecl *FD,
if (!ObjArg->tryEvaluateObjectSize(Result, getASTContext(), BOSType))
return;
// Get the object size in the target's size_t width.
- const TargetInfo &TI = getASTContext().getTargetInfo();
- unsigned SizeTypeWidth = TI.getTypeWidth(TI.getSizeType());
ObjectSize = llvm::APSInt::getUnsigned(Result).extOrTrunc(SizeTypeWidth);
}
// Evaluate the number of bytes of the object that this call will use.
- Expr::EvalResult Result;
- Expr *UsedSizeArg = TheCall->getArg(SizeIndex);
- if (!UsedSizeArg->EvaluateAsInt(Result, getASTContext()))
- return;
- llvm::APSInt UsedSize = Result.Val.getInt();
+ if (!UsedSize) {
+ Expr::EvalResult Result;
+ Expr *UsedSizeArg = TheCall->getArg(SizeIndex);
+ if (!UsedSizeArg->EvaluateAsInt(Result, getASTContext()))
+ return;
+ UsedSize = Result.Val.getInt().extOrTrunc(SizeTypeWidth);
+ }
- if (UsedSize.ule(ObjectSize))
+ if (UsedSize.getValue().ule(ObjectSize))
return;
StringRef FunctionName = getASTContext().BuiltinInfo.getName(BuiltinID);
@@ -533,7 +771,7 @@ void Sema::checkFortifiedBuiltinMemoryFunction(FunctionDecl *FD,
DiagRuntimeBehavior(TheCall->getBeginLoc(), TheCall,
PDiag(DiagID)
<< FunctionName << ObjectSize.toString(/*Radix=*/10)
- << UsedSize.toString(/*Radix=*/10));
+ << UsedSize.getValue().toString(/*Radix=*/10));
}
static bool SemaBuiltinSEHScopeCheck(Sema &SemaRef, CallExpr *TheCall,
@@ -1152,6 +1390,49 @@ CheckBuiltinTargetSupport(Sema &S, unsigned BuiltinID, CallExpr *TheCall,
return true;
}
+static void CheckNonNullArgument(Sema &S, const Expr *ArgExpr,
+ SourceLocation CallSiteLoc);
+
+bool Sema::CheckTSBuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
+ switch (TI.getTriple().getArch()) {
+ default:
+ // Some builtins don't require additional checking, so just consider these
+ // acceptable.
+ return false;
+ case llvm::Triple::arm:
+ case llvm::Triple::armeb:
+ case llvm::Triple::thumb:
+ case llvm::Triple::thumbeb:
+ return CheckARMBuiltinFunctionCall(TI, BuiltinID, TheCall);
+ case llvm::Triple::aarch64:
+ case llvm::Triple::aarch64_32:
+ case llvm::Triple::aarch64_be:
+ return CheckAArch64BuiltinFunctionCall(TI, BuiltinID, TheCall);
+ case llvm::Triple::bpfeb:
+ case llvm::Triple::bpfel:
+ return CheckBPFBuiltinFunctionCall(BuiltinID, TheCall);
+ case llvm::Triple::hexagon:
+ return CheckHexagonBuiltinFunctionCall(BuiltinID, TheCall);
+ case llvm::Triple::mips:
+ case llvm::Triple::mipsel:
+ case llvm::Triple::mips64:
+ case llvm::Triple::mips64el:
+ return CheckMipsBuiltinFunctionCall(TI, BuiltinID, TheCall);
+ case llvm::Triple::systemz:
+ return CheckSystemZBuiltinFunctionCall(BuiltinID, TheCall);
+ case llvm::Triple::x86:
+ case llvm::Triple::x86_64:
+ return CheckX86BuiltinFunctionCall(TI, BuiltinID, TheCall);
+ case llvm::Triple::ppc:
+ case llvm::Triple::ppc64:
+ case llvm::Triple::ppc64le:
+ return CheckPPCBuiltinFunctionCall(TI, BuiltinID, TheCall);
+ case llvm::Triple::amdgcn:
+ return CheckAMDGCNBuiltinFunctionCall(BuiltinID, TheCall);
+ }
+}
+
ExprResult
Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
CallExpr *TheCall) {
@@ -1421,6 +1702,19 @@ Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
case Builtin::BI__builtin_nontemporal_load:
case Builtin::BI__builtin_nontemporal_store:
return SemaBuiltinNontemporalOverloaded(TheCallResult);
+ case Builtin::BI__builtin_memcpy_inline: {
+ clang::Expr *SizeOp = TheCall->getArg(2);
+ // We warn about copying to or from `nullptr` pointers when `size` is
+ // greater than 0. When `size` is value dependent we cannot evaluate its
+ // value so we bail out.
+ if (SizeOp->isValueDependent())
+ break;
+ if (!SizeOp->EvaluateKnownConstInt(Context).isNullValue()) {
+ CheckNonNullArgument(*this, TheCall->getArg(0), TheCall->getExprLoc());
+ CheckNonNullArgument(*this, TheCall->getArg(1), TheCall->getExprLoc());
+ }
+ break;
+ }
#define BUILTIN(ID, TYPE, ATTRS)
#define ATOMIC_BUILTIN(ID, TYPE, ATTRS) \
case Builtin::BI##ID: \
@@ -1447,7 +1741,7 @@ Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
case Builtin::BI__builtin_add_overflow:
case Builtin::BI__builtin_sub_overflow:
case Builtin::BI__builtin_mul_overflow:
- if (SemaBuiltinOverflow(*this, TheCall))
+ if (SemaBuiltinOverflow(*this, TheCall, BuiltinID))
return ExprError();
break;
case Builtin::BI__builtin_operator_new:
@@ -1515,6 +1809,36 @@ Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
TheCall->setType(Context.IntTy);
break;
}
+ case Builtin::BI__builtin_expect_with_probability: {
+ // We first want to ensure we are called with 3 arguments
+ if (checkArgCount(*this, TheCall, 3))
+ return ExprError();
+ // then check probability is constant float in range [0.0, 1.0]
+ const Expr *ProbArg = TheCall->getArg(2);
+ SmallVector<PartialDiagnosticAt, 8> Notes;
+ Expr::EvalResult Eval;
+ Eval.Diag = &Notes;
+ if ((!ProbArg->EvaluateAsConstantExpr(Eval, Expr::EvaluateForCodeGen,
+ Context)) ||
+ !Eval.Val.isFloat()) {
+ Diag(ProbArg->getBeginLoc(), diag::err_probability_not_constant_float)
+ << ProbArg->getSourceRange();
+ for (const PartialDiagnosticAt &PDiag : Notes)
+ Diag(PDiag.first, PDiag.second);
+ return ExprError();
+ }
+ llvm::APFloat Probability = Eval.Val.getFloat();
+ bool LoseInfo = false;
+ Probability.convert(llvm::APFloat::IEEEdouble(),
+ llvm::RoundingMode::Dynamic, &LoseInfo);
+ if (!(Probability >= llvm::APFloat(0.0) &&
+ Probability <= llvm::APFloat(1.0))) {
+ Diag(ProbArg->getBeginLoc(), diag::err_probability_out_of_range)
+ << ProbArg->getSourceRange();
+ return ExprError();
+ }
+ break;
+ }
case Builtin::BI__builtin_preserve_access_index:
if (SemaBuiltinPreserveAI(*this, TheCall))
return ExprError();
@@ -1608,62 +1932,55 @@ Sema::CheckBuiltinFunctionCall(FunctionDecl *FDecl, unsigned BuiltinID,
return ExprError();
break;
case Builtin::BI__builtin_os_log_format:
+ Cleanup.setExprNeedsCleanups(true);
+ LLVM_FALLTHROUGH;
case Builtin::BI__builtin_os_log_format_buffer_size:
if (SemaBuiltinOSLogFormat(TheCall))
return ExprError();
break;
+ case Builtin::BI__builtin_frame_address:
+ case Builtin::BI__builtin_return_address: {
+ if (SemaBuiltinConstantArgRange(TheCall, 0, 0, 0xFFFF))
+ return ExprError();
+
+ // -Wframe-address warning if non-zero passed to builtin
+ // return/frame address.
+ Expr::EvalResult Result;
+ if (TheCall->getArg(0)->EvaluateAsInt(Result, getASTContext()) &&
+ Result.Val.getInt() != 0)
+ Diag(TheCall->getBeginLoc(), diag::warn_frame_address)
+ << ((BuiltinID == Builtin::BI__builtin_return_address)
+ ? "__builtin_return_address"
+ : "__builtin_frame_address")
+ << TheCall->getSourceRange();
+ break;
+ }
+
+ case Builtin::BI__builtin_matrix_transpose:
+ return SemaBuiltinMatrixTranspose(TheCall, TheCallResult);
+
+ case Builtin::BI__builtin_matrix_column_major_load:
+ return SemaBuiltinMatrixColumnMajorLoad(TheCall, TheCallResult);
+
+ case Builtin::BI__builtin_matrix_column_major_store:
+ return SemaBuiltinMatrixColumnMajorStore(TheCall, TheCallResult);
}
// Since the target specific builtins for each arch overlap, only check those
// of the arch we are compiling for.
if (Context.BuiltinInfo.isTSBuiltin(BuiltinID)) {
- switch (Context.getTargetInfo().getTriple().getArch()) {
- case llvm::Triple::arm:
- case llvm::Triple::armeb:
- case llvm::Triple::thumb:
- case llvm::Triple::thumbeb:
- if (CheckARMBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::aarch64:
- case llvm::Triple::aarch64_32:
- case llvm::Triple::aarch64_be:
- if (CheckAArch64BuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::bpfeb:
- case llvm::Triple::bpfel:
- if (CheckBPFBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::hexagon:
- if (CheckHexagonBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::mips:
- case llvm::Triple::mipsel:
- case llvm::Triple::mips64:
- case llvm::Triple::mips64el:
- if (CheckMipsBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::systemz:
- if (CheckSystemZBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::x86:
- case llvm::Triple::x86_64:
- if (CheckX86BuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- case llvm::Triple::ppc:
- case llvm::Triple::ppc64:
- case llvm::Triple::ppc64le:
- if (CheckPPCBuiltinFunctionCall(BuiltinID, TheCall))
- return ExprError();
- break;
- default:
- break;
+ if (Context.BuiltinInfo.isAuxBuiltinID(BuiltinID)) {
+ assert(Context.getAuxTargetInfo() &&
+ "Aux Target Builtin, but not an aux target?");
+
+ if (CheckTSBuiltinFunctionCall(
+ *Context.getAuxTargetInfo(),
+ Context.BuiltinInfo.getAuxBuiltinID(BuiltinID), TheCall))
+ return ExprError();
+ } else {
+ if (CheckTSBuiltinFunctionCall(Context.getTargetInfo(), BuiltinID,
+ TheCall))
+ return ExprError();
}
}
@@ -1697,6 +2014,9 @@ static unsigned RFT(unsigned t, bool shift = false, bool ForceQuad = false) {
case NeonTypeFlags::Float64:
assert(!shift && "cannot shift float types!");
return (1 << IsQuad) - 1;
+ case NeonTypeFlags::BFloat16:
+ assert(!shift && "cannot shift float types!");
+ return (4 << IsQuad) - 1;
}
llvm_unreachable("Invalid NeonTypeFlag!");
}
@@ -1736,11 +2056,135 @@ static QualType getNeonEltType(NeonTypeFlags Flags, ASTContext &Context,
return Context.FloatTy;
case NeonTypeFlags::Float64:
return Context.DoubleTy;
+ case NeonTypeFlags::BFloat16:
+ return Context.BFloat16Ty;
}
llvm_unreachable("Invalid NeonTypeFlag!");
}
-bool Sema::CheckNeonBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
+bool Sema::CheckSVEBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
+ // Range check SVE intrinsics that take immediate values.
+ SmallVector<std::tuple<int,int,int>, 3> ImmChecks;
+
+ switch (BuiltinID) {
+ default:
+ return false;
+#define GET_SVE_IMMEDIATE_CHECK
+#include "clang/Basic/arm_sve_sema_rangechecks.inc"
+#undef GET_SVE_IMMEDIATE_CHECK
+ }
+
+ // Perform all the immediate checks for this builtin call.
+ bool HasError = false;
+ for (auto &I : ImmChecks) {
+ int ArgNum, CheckTy, ElementSizeInBits;
+ std::tie(ArgNum, CheckTy, ElementSizeInBits) = I;
+
+ typedef bool(*OptionSetCheckFnTy)(int64_t Value);
+
+ // Function that checks whether the operand (ArgNum) is an immediate
+ // that is one of the predefined values.
+ auto CheckImmediateInSet = [&](OptionSetCheckFnTy CheckImm,
+ int ErrDiag) -> bool {
+ // We can't check the value of a dependent argument.
+ Expr *Arg = TheCall->getArg(ArgNum);
+ if (Arg->isTypeDependent() || Arg->isValueDependent())
+ return false;
+
+ // Check constant-ness first.
+ llvm::APSInt Imm;
+ if (SemaBuiltinConstantArg(TheCall, ArgNum, Imm))
+ return true;
+
+ if (!CheckImm(Imm.getSExtValue()))
+ return Diag(TheCall->getBeginLoc(), ErrDiag) << Arg->getSourceRange();
+ return false;
+ };
+
+ switch ((SVETypeFlags::ImmCheckType)CheckTy) {
+ case SVETypeFlags::ImmCheck0_31:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 31))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck0_13:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 13))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck1_16:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 1, 16))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck0_7:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 7))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckExtract:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0,
+ (2048 / ElementSizeInBits) - 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckShiftRight:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 1, ElementSizeInBits))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckShiftRightNarrow:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 1,
+ ElementSizeInBits / 2))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckShiftLeft:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0,
+ ElementSizeInBits - 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckLaneIndex:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0,
+ (128 / (1 * ElementSizeInBits)) - 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckLaneIndexCompRotate:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0,
+ (128 / (2 * ElementSizeInBits)) - 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckLaneIndexDot:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0,
+ (128 / (4 * ElementSizeInBits)) - 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckComplexRot90_270:
+ if (CheckImmediateInSet([](int64_t V) { return V == 90 || V == 270; },
+ diag::err_rotation_argument_to_cadd))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheckComplexRotAll90:
+ if (CheckImmediateInSet(
+ [](int64_t V) {
+ return V == 0 || V == 90 || V == 180 || V == 270;
+ },
+ diag::err_rotation_argument_to_cmla))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck0_1:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 1))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck0_2:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 2))
+ HasError = true;
+ break;
+ case SVETypeFlags::ImmCheck0_3:
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, 0, 3))
+ HasError = true;
+ break;
+ }
+ }
+
+ return HasError;
+}
+
+bool Sema::CheckNeonBuiltinFunctionCall(const TargetInfo &TI,
+ unsigned BuiltinID, CallExpr *TheCall) {
llvm::APSInt Result;
uint64_t mask = 0;
unsigned TV = 0;
@@ -1774,12 +2218,11 @@ bool Sema::CheckNeonBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
ExprResult RHS = DefaultFunctionArrayLvalueConversion(Arg);
QualType RHSTy = RHS.get()->getType();
- llvm::Triple::ArchType Arch = Context.getTargetInfo().getTriple().getArch();
+ llvm::Triple::ArchType Arch = TI.getTriple().getArch();
bool IsPolyUnsigned = Arch == llvm::Triple::aarch64 ||
Arch == llvm::Triple::aarch64_32 ||
Arch == llvm::Triple::aarch64_be;
- bool IsInt64Long =
- Context.getTargetInfo().getInt64Type() == TargetInfo::SignedLong;
+ bool IsInt64Long = TI.getInt64Type() == TargetInfo::SignedLong;
QualType EltTy =
getNeonEltType(NeonTypeFlags(TV), Context, IsPolyUnsigned, IsInt64Long);
if (HasConstPtr)
@@ -1817,6 +2260,47 @@ bool Sema::CheckMVEBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
}
}
+bool Sema::CheckCDEBuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
+ bool Err = false;
+ switch (BuiltinID) {
+ default:
+ return false;
+#include "clang/Basic/arm_cde_builtin_sema.inc"
+ }
+
+ if (Err)
+ return true;
+
+ return CheckARMCoprocessorImmediate(TI, TheCall->getArg(0), /*WantCDE*/ true);
+}
+
+bool Sema::CheckARMCoprocessorImmediate(const TargetInfo &TI,
+ const Expr *CoprocArg, bool WantCDE) {
+ if (isConstantEvaluated())
+ return false;
+
+ // We can't check the value of a dependent argument.
+ if (CoprocArg->isTypeDependent() || CoprocArg->isValueDependent())
+ return false;
+
+ llvm::APSInt CoprocNoAP;
+ bool IsICE = CoprocArg->isIntegerConstantExpr(CoprocNoAP, Context);
+ (void)IsICE;
+ assert(IsICE && "Coprocossor immediate is not a constant expression");
+ int64_t CoprocNo = CoprocNoAP.getExtValue();
+ assert(CoprocNo >= 0 && "Coprocessor immediate must be non-negative");
+
+ uint32_t CDECoprocMask = TI.getARMCDECoprocMask();
+ bool IsCDECoproc = CoprocNo <= 7 && (CDECoprocMask & (1 << CoprocNo));
+
+ if (IsCDECoproc != WantCDE)
+ return Diag(CoprocArg->getBeginLoc(), diag::err_arm_invalid_coproc)
+ << (int)CoprocNo << (int)WantCDE << CoprocArg->getSourceRange();
+
+ return false;
+}
+
bool Sema::CheckARMBuiltinExclusiveCall(unsigned BuiltinID, CallExpr *TheCall,
unsigned MaxWidth) {
assert((BuiltinID == ARM::BI__builtin_arm_ldrex ||
@@ -1932,7 +2416,8 @@ bool Sema::CheckARMBuiltinExclusiveCall(unsigned BuiltinID, CallExpr *TheCall,
return false;
}
-bool Sema::CheckARMBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
+bool Sema::CheckARMBuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
if (BuiltinID == ARM::BI__builtin_arm_ldrex ||
BuiltinID == ARM::BI__builtin_arm_ldaex ||
BuiltinID == ARM::BI__builtin_arm_strex ||
@@ -1955,10 +2440,12 @@ bool Sema::CheckARMBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
BuiltinID == ARM::BI__builtin_arm_wsrp)
return SemaBuiltinARMSpecialReg(BuiltinID, TheCall, 0, 5, true);
- if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
+ if (CheckNeonBuiltinFunctionCall(TI, BuiltinID, TheCall))
return true;
if (CheckMVEBuiltinFunctionCall(BuiltinID, TheCall))
return true;
+ if (CheckCDEBuiltinFunctionCall(TI, BuiltinID, TheCall))
+ return true;
// For intrinsics which take an immediate value as part of the instruction,
// range check them here.
@@ -1981,11 +2468,33 @@ bool Sema::CheckARMBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
case ARM::BI__builtin_arm_isb:
case ARM::BI__builtin_arm_dbg:
return SemaBuiltinConstantArgRange(TheCall, 0, 0, 15);
- }
-}
-
-bool Sema::CheckAArch64BuiltinFunctionCall(unsigned BuiltinID,
- CallExpr *TheCall) {
+ case ARM::BI__builtin_arm_cdp:
+ case ARM::BI__builtin_arm_cdp2:
+ case ARM::BI__builtin_arm_mcr:
+ case ARM::BI__builtin_arm_mcr2:
+ case ARM::BI__builtin_arm_mrc:
+ case ARM::BI__builtin_arm_mrc2:
+ case ARM::BI__builtin_arm_mcrr:
+ case ARM::BI__builtin_arm_mcrr2:
+ case ARM::BI__builtin_arm_mrrc:
+ case ARM::BI__builtin_arm_mrrc2:
+ case ARM::BI__builtin_arm_ldc:
+ case ARM::BI__builtin_arm_ldcl:
+ case ARM::BI__builtin_arm_ldc2:
+ case ARM::BI__builtin_arm_ldc2l:
+ case ARM::BI__builtin_arm_stc:
+ case ARM::BI__builtin_arm_stcl:
+ case ARM::BI__builtin_arm_stc2:
+ case ARM::BI__builtin_arm_stc2l:
+ return SemaBuiltinConstantArgRange(TheCall, 0, 0, 15) ||
+ CheckARMCoprocessorImmediate(TI, TheCall->getArg(0),
+ /*WantCDE*/ false);
+ }
+}
+
+bool Sema::CheckAArch64BuiltinFunctionCall(const TargetInfo &TI,
+ unsigned BuiltinID,
+ CallExpr *TheCall) {
if (BuiltinID == AArch64::BI__builtin_arm_ldrex ||
BuiltinID == AArch64::BI__builtin_arm_ldaex ||
BuiltinID == AArch64::BI__builtin_arm_strex ||
@@ -2030,7 +2539,10 @@ bool Sema::CheckAArch64BuiltinFunctionCall(unsigned BuiltinID,
if (BuiltinID == AArch64::BI__getReg)
return SemaBuiltinConstantArgRange(TheCall, 0, 0, 31);
- if (CheckNeonBuiltinFunctionCall(BuiltinID, TheCall))
+ if (CheckNeonBuiltinFunctionCall(TI, BuiltinID, TheCall))
+ return true;
+
+ if (CheckSVEBuiltinFunctionCall(BuiltinID, TheCall))
return true;
// For intrinsics which take an immediate value as part of the instruction,
@@ -2049,17 +2561,33 @@ bool Sema::CheckAArch64BuiltinFunctionCall(unsigned BuiltinID,
bool Sema::CheckBPFBuiltinFunctionCall(unsigned BuiltinID,
CallExpr *TheCall) {
- assert(BuiltinID == BPF::BI__builtin_preserve_field_info &&
+ assert((BuiltinID == BPF::BI__builtin_preserve_field_info ||
+ BuiltinID == BPF::BI__builtin_btf_type_id) &&
"unexpected ARM builtin");
if (checkArgCount(*this, TheCall, 2))
return true;
+ Expr *Arg;
+ if (BuiltinID == BPF::BI__builtin_btf_type_id) {
+ // The second argument needs to be a constant int
+ llvm::APSInt Value;
+ Arg = TheCall->getArg(1);
+ if (!Arg->isIntegerConstantExpr(Value, Context)) {
+ Diag(Arg->getBeginLoc(), diag::err_btf_type_id_not_const)
+ << 2 << Arg->getSourceRange();
+ return true;
+ }
+
+ TheCall->setType(Context.UnsignedIntTy);
+ return false;
+ }
+
// The first argument needs to be a record field access.
// If it is an array element access, we delay decision
// to BPF backend to check whether the access is a
// field access or not.
- Expr *Arg = TheCall->getArg(0);
+ Arg = TheCall->getArg(0);
if (Arg->getType()->getAsPlaceholderType() ||
(Arg->IgnoreParens()->getObjectKind() != OK_BitField &&
!dyn_cast<MemberExpr>(Arg->IgnoreParens()) &&
@@ -2070,8 +2598,9 @@ bool Sema::CheckBPFBuiltinFunctionCall(unsigned BuiltinID,
}
// The second argument needs to be a constant int
+ Arg = TheCall->getArg(1);
llvm::APSInt Value;
- if (!TheCall->getArg(1)->isIntegerConstantExpr(Value, Context)) {
+ if (!Arg->isIntegerConstantExpr(Value, Context)) {
Diag(Arg->getBeginLoc(), diag::err_preserve_field_info_not_const)
<< 2 << Arg->getSourceRange();
return true;
@@ -2081,825 +2610,6 @@ bool Sema::CheckBPFBuiltinFunctionCall(unsigned BuiltinID,
return false;
}
-bool Sema::CheckHexagonBuiltinCpu(unsigned BuiltinID, CallExpr *TheCall) {
- struct BuiltinAndString {
- unsigned BuiltinID;
- const char *Str;
- };
-
- static BuiltinAndString ValidCPU[] = {
- { Hexagon::BI__builtin_HEXAGON_A6_vcmpbeq_notany, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_A6_vminub_RdP, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_F2_dfadd, "v66" },
- { Hexagon::BI__builtin_HEXAGON_F2_dfsub, "v66" },
- { Hexagon::BI__builtin_HEXAGON_M2_mnaci, "v66" },
- { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_M6_vabsdiffub, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S2_mask, "v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_nac, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_p_xacc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_nac, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_rol_i_r_xacc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_vsplatrbp, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_vtrunehb_ppp, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_S6_vtrunohb_ppp, "v62,v65,v66" },
- };
-
- static BuiltinAndString ValidHVX[] = {
- { Hexagon::BI__builtin_HEXAGON_V6_hi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_hi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lo, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lo_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_extractw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_extractw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplatb_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplath, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplath_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_lvsplatw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_and_n_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_not, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_not_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_or_n_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_scalar2v2_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_pred_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_shuffeqh_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_shuffeqw_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsb, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsb_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsb_sat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsdiffw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsh_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vabsw_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddb_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddbsat_dv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddcarry_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddcarrysat_128B, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddclbh_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddclbw_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddh_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddhw_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubh_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddubsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddububb_sat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduhw_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vadduwsat_dv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddw_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaddwsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_valignb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_valignb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_valignbi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_valignbi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vand, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vand_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandnqrt_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandqrt, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandqrt_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvnqv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvqv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvqv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvrt, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vandvrt_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslh_acc_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslhv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslhv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslw_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslwv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vaslwv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrh_acc_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhbrndsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhbsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhubrndsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhubsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrhv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasr_into, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasr_into_128B, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruhubrndsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruhubsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhrndsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasruwuhsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrw_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwhrndsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhrndsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwuhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vasrwv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vassign, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vassign_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vassignp, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vassignp_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgb, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgb_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgbrnd_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavghrnd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgubrnd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguhrnd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguw, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguw_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavguwrnd_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vavgwrnd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcl0h, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcl0h_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcl0w, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcl0w_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcombine, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vcombine_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vd0, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vd0_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdd0, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdd0_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealb4w_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdealvdd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdelta, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdelta_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpybus_dv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhb_dv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhisat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsuisat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhsusat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdmpyhvsat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vdsaduh_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqb_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqh_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_veqw_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtb_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgth_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtub_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuh_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtuw_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_and_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_or_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vgtw_xor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vinsertwr_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlalignb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlalignb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlalignbi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrb_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrhv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlsrwv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlut4, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlut4_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvbi_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_nm_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvvb_oracci_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwhi_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_nm_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vlutvwh_oracci_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxb_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmaxw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminb_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vminw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabusv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuu_acc_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpabuuv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpahhsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhb_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpauhuhsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpsuhuhsat_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybusv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpybv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyewuh_64_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyh_acc_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsat_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhsrs_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhss_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyhvsrs_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyieoh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewh_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiewuh_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyih, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyih_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyihb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiowh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwh_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyiwub_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_64_acc_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_rnd_sacc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyowh_sacc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyub_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyubv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuh_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhe_acc_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmpyuhv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmux, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vmux_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgb, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgb_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnavgw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnormamth, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnormamth_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnormamtw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnot, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vnot_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackeb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackeb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackeh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackeh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackhb_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackhub_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackob, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackob_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackoh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackoh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackwh_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpackwuh_sat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vpopcounth_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqb_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqh_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vprefixqw_128B, "v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrdelta, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrdelta_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_128B, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybub_rtt_acc_128B, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusi_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybusv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpybv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubi_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_128B, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyub_rtt_acc_128B, "v65" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrmpyubv_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vror, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vror_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrotr, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrotr_128B, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundhb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundhb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundhub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundhub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrounduhub_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrounduwuh_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundwh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundwh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vroundwuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vrsadubi_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatdw, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatdw_128B, "v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsathub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsathub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatuwuh_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatwh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsatwh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufeh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufeh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffeb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffob, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffob_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshuffvdd_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoeb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoeh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vshufoh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubb_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubbsat_dv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubcarry_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubh_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubhw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsububsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubububb_sat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuhw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubuwsat_dv_128B, "v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubw, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubw_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubw_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vsubwsat_dv_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vswap, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vswap_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpybus_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vtmpyhb_acc_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackob, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackob_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackoh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackub, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackub_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vunpackuh_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vxor, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vxor_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vzb, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vzb_128B, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vzh, "v60,v62,v65,v66" },
- { Hexagon::BI__builtin_HEXAGON_V6_vzh_128B, "v60,v62,v65,v66" },
- };
-
- // Sort the tables on first execution so we can binary search them.
- auto SortCmp = [](const BuiltinAndString &LHS, const BuiltinAndString &RHS) {
- return LHS.BuiltinID < RHS.BuiltinID;
- };
- static const bool SortOnce =
- (llvm::sort(ValidCPU, SortCmp),
- llvm::sort(ValidHVX, SortCmp), true);
- (void)SortOnce;
- auto LowerBoundCmp = [](const BuiltinAndString &BI, unsigned BuiltinID) {
- return BI.BuiltinID < BuiltinID;
- };
-
- const TargetInfo &TI = Context.getTargetInfo();
-
- const BuiltinAndString *FC =
- llvm::lower_bound(ValidCPU, BuiltinID, LowerBoundCmp);
- if (FC != std::end(ValidCPU) && FC->BuiltinID == BuiltinID) {
- const TargetOptions &Opts = TI.getTargetOpts();
- StringRef CPU = Opts.CPU;
- if (!CPU.empty()) {
- assert(CPU.startswith("hexagon") && "Unexpected CPU name");
- CPU.consume_front("hexagon");
- SmallVector<StringRef, 3> CPUs;
- StringRef(FC->Str).split(CPUs, ',');
- if (llvm::none_of(CPUs, [CPU](StringRef S) { return S == CPU; }))
- return Diag(TheCall->getBeginLoc(),
- diag::err_hexagon_builtin_unsupported_cpu);
- }
- }
-
- const BuiltinAndString *FH =
- llvm::lower_bound(ValidHVX, BuiltinID, LowerBoundCmp);
- if (FH != std::end(ValidHVX) && FH->BuiltinID == BuiltinID) {
- if (!TI.hasFeature("hvx"))
- return Diag(TheCall->getBeginLoc(),
- diag::err_hexagon_builtin_requires_hvx);
-
- SmallVector<StringRef, 3> HVXs;
- StringRef(FH->Str).split(HVXs, ',');
- bool IsValid = llvm::any_of(HVXs,
- [&TI] (StringRef V) {
- std::string F = "hvx" + V.str();
- return TI.hasFeature(F);
- });
- if (!IsValid)
- return Diag(TheCall->getBeginLoc(),
- diag::err_hexagon_builtin_unsupported_hvx);
- }
-
- return false;
-}
-
bool Sema::CheckHexagonBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
struct ArgInfo {
uint8_t OpNum;
@@ -2916,7 +2626,7 @@ bool Sema::CheckHexagonBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
{ Hexagon::BI__builtin_circ_ldd, {{ 3, true, 4, 3 }} },
{ Hexagon::BI__builtin_circ_ldw, {{ 3, true, 4, 2 }} },
{ Hexagon::BI__builtin_circ_ldh, {{ 3, true, 4, 1 }} },
- { Hexagon::BI__builtin_circ_lduh, {{ 3, true, 4, 0 }} },
+ { Hexagon::BI__builtin_circ_lduh, {{ 3, true, 4, 1 }} },
{ Hexagon::BI__builtin_circ_ldb, {{ 3, true, 4, 0 }} },
{ Hexagon::BI__builtin_circ_ldub, {{ 3, true, 4, 0 }} },
{ Hexagon::BI__builtin_circ_std, {{ 3, true, 4, 3 }} },
@@ -3137,17 +2847,17 @@ bool Sema::CheckHexagonBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
bool Sema::CheckHexagonBuiltinFunctionCall(unsigned BuiltinID,
CallExpr *TheCall) {
- return CheckHexagonBuiltinCpu(BuiltinID, TheCall) ||
- CheckHexagonBuiltinArgument(BuiltinID, TheCall);
+ return CheckHexagonBuiltinArgument(BuiltinID, TheCall);
}
-bool Sema::CheckMipsBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
- return CheckMipsBuiltinCpu(BuiltinID, TheCall) ||
+bool Sema::CheckMipsBuiltinFunctionCall(const TargetInfo &TI,
+ unsigned BuiltinID, CallExpr *TheCall) {
+ return CheckMipsBuiltinCpu(TI, BuiltinID, TheCall) ||
CheckMipsBuiltinArgument(BuiltinID, TheCall);
}
-bool Sema::CheckMipsBuiltinCpu(unsigned BuiltinID, CallExpr *TheCall) {
- const TargetInfo &TI = Context.getTargetInfo();
+bool Sema::CheckMipsBuiltinCpu(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
if (Mips::BI__builtin_mips_addu_qb <= BuiltinID &&
BuiltinID <= Mips::BI__builtin_mips_lwx) {
@@ -3340,10 +3050,14 @@ bool Sema::CheckMipsBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
case Mips::BI__builtin_msa_ld_h: i = 1; l = -1024; u = 1022; m = 2; break;
case Mips::BI__builtin_msa_ld_w: i = 1; l = -2048; u = 2044; m = 4; break;
case Mips::BI__builtin_msa_ld_d: i = 1; l = -4096; u = 4088; m = 8; break;
+ case Mips::BI__builtin_msa_ldr_d: i = 1; l = -4096; u = 4088; m = 8; break;
+ case Mips::BI__builtin_msa_ldr_w: i = 1; l = -2048; u = 2044; m = 4; break;
case Mips::BI__builtin_msa_st_b: i = 2; l = -512; u = 511; m = 1; break;
case Mips::BI__builtin_msa_st_h: i = 2; l = -1024; u = 1022; m = 2; break;
case Mips::BI__builtin_msa_st_w: i = 2; l = -2048; u = 2044; m = 4; break;
case Mips::BI__builtin_msa_st_d: i = 2; l = -4096; u = 4088; m = 8; break;
+ case Mips::BI__builtin_msa_str_d: i = 2; l = -4096; u = 4088; m = 8; break;
+ case Mips::BI__builtin_msa_str_w: i = 2; l = -2048; u = 2044; m = 4; break;
}
if (!m)
@@ -3353,15 +3067,13 @@ bool Sema::CheckMipsBuiltinArgument(unsigned BuiltinID, CallExpr *TheCall) {
SemaBuiltinConstantArgMultiple(TheCall, i, m);
}
-bool Sema::CheckPPCBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
+bool Sema::CheckPPCBuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
unsigned i = 0, l = 0, u = 0;
bool Is64BitBltin = BuiltinID == PPC::BI__builtin_divde ||
BuiltinID == PPC::BI__builtin_divdeu ||
BuiltinID == PPC::BI__builtin_bpermd;
- bool IsTarget64Bit = Context.getTargetInfo()
- .getTypeWidth(Context
- .getTargetInfo()
- .getIntPtrType()) == 64;
+ bool IsTarget64Bit = TI.getTypeWidth(TI.getIntPtrType()) == 64;
bool IsBltinExtDiv = BuiltinID == PPC::BI__builtin_divwe ||
BuiltinID == PPC::BI__builtin_divweu ||
BuiltinID == PPC::BI__builtin_divde ||
@@ -3371,14 +3083,13 @@ bool Sema::CheckPPCBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
return Diag(TheCall->getBeginLoc(), diag::err_64_bit_builtin_32_bit_tgt)
<< TheCall->getSourceRange();
- if ((IsBltinExtDiv && !Context.getTargetInfo().hasFeature("extdiv")) ||
- (BuiltinID == PPC::BI__builtin_bpermd &&
- !Context.getTargetInfo().hasFeature("bpermd")))
+ if ((IsBltinExtDiv && !TI.hasFeature("extdiv")) ||
+ (BuiltinID == PPC::BI__builtin_bpermd && !TI.hasFeature("bpermd")))
return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
<< TheCall->getSourceRange();
auto SemaVSXCheck = [&](CallExpr *TheCall) -> bool {
- if (!Context.getTargetInfo().hasFeature("vsx"))
+ if (!TI.hasFeature("vsx"))
return Diag(TheCall->getBeginLoc(), diag::err_ppc_builtin_only_on_pwr7)
<< TheCall->getSourceRange();
return false;
@@ -3414,10 +3125,75 @@ bool Sema::CheckPPCBuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
SemaBuiltinConstantArgRange(TheCall, 1, 0, 1);
case PPC::BI__builtin_pack_vector_int128:
return SemaVSXCheck(TheCall);
+ case PPC::BI__builtin_altivec_vgnb:
+ return SemaBuiltinConstantArgRange(TheCall, 1, 2, 7);
+ case PPC::BI__builtin_vsx_xxeval:
+ return SemaBuiltinConstantArgRange(TheCall, 3, 0, 255);
+ case PPC::BI__builtin_altivec_vsldbi:
+ return SemaBuiltinConstantArgRange(TheCall, 2, 0, 7);
+ case PPC::BI__builtin_altivec_vsrdbi:
+ return SemaBuiltinConstantArgRange(TheCall, 2, 0, 7);
+ case PPC::BI__builtin_vsx_xxpermx:
+ return SemaBuiltinConstantArgRange(TheCall, 3, 0, 7);
}
return SemaBuiltinConstantArgRange(TheCall, i, l, u);
}
+bool Sema::CheckAMDGCNBuiltinFunctionCall(unsigned BuiltinID,
+ CallExpr *TheCall) {
+ // Position of the memory order and synchronization scope arguments in the
+ // builtin's argument list.
+ unsigned OrderIndex, ScopeIndex;
+ switch (BuiltinID) {
+ case AMDGPU::BI__builtin_amdgcn_atomic_inc32:
+ case AMDGPU::BI__builtin_amdgcn_atomic_inc64:
+ case AMDGPU::BI__builtin_amdgcn_atomic_dec32:
+ case AMDGPU::BI__builtin_amdgcn_atomic_dec64:
+ OrderIndex = 2;
+ ScopeIndex = 3;
+ break;
+ case AMDGPU::BI__builtin_amdgcn_fence:
+ OrderIndex = 0;
+ ScopeIndex = 1;
+ break;
+ default:
+ return false;
+ }
+
+ ExprResult Arg = TheCall->getArg(OrderIndex);
+ auto ArgExpr = Arg.get();
+ Expr::EvalResult ArgResult;
+
+ if (!ArgExpr->EvaluateAsInt(ArgResult, Context))
+ return Diag(ArgExpr->getExprLoc(), diag::err_typecheck_expect_int)
+ << ArgExpr->getType();
+ int ord = ArgResult.Val.getInt().getZExtValue();
+
+ // Check validity of the memory ordering as per C11 / C++11's memory model.
+ switch (static_cast<llvm::AtomicOrderingCABI>(ord)) {
+ case llvm::AtomicOrderingCABI::acquire:
+ case llvm::AtomicOrderingCABI::release:
+ case llvm::AtomicOrderingCABI::acq_rel:
+ case llvm::AtomicOrderingCABI::seq_cst:
+ break;
+ default: {
+ return Diag(ArgExpr->getBeginLoc(),
+ diag::warn_atomic_op_has_invalid_memory_order)
+ << ArgExpr->getSourceRange();
+ }
+ }
+
+ Arg = TheCall->getArg(ScopeIndex);
+ ArgExpr = Arg.get();
+ Expr::EvalResult ArgResult1;
+ // Check that sync scope is a constant literal
+ if (!ArgExpr->EvaluateAsConstantExpr(ArgResult1, Expr::EvaluateForCodeGen,
+ Context))
+ return Diag(ArgExpr->getExprLoc(), diag::err_expr_not_string_literal)
+ << ArgExpr->getType();
+
+ return false;
+}
+
bool Sema::CheckSystemZBuiltinFunctionCall(unsigned BuiltinID,
CallExpr *TheCall) {
if (BuiltinID == SystemZ::BI__builtin_tabort) {
@@ -3486,7 +3262,8 @@ bool Sema::CheckSystemZBuiltinFunctionCall(unsigned BuiltinID,
/// SemaBuiltinCpuSupports - Handle __builtin_cpu_supports(char *).
/// This checks that the target supports __builtin_cpu_supports and
/// that the string argument is constant and valid.
-static bool SemaBuiltinCpuSupports(Sema &S, CallExpr *TheCall) {
+static bool SemaBuiltinCpuSupports(Sema &S, const TargetInfo &TI,
+ CallExpr *TheCall) {
Expr *Arg = TheCall->getArg(0);
// Check if the argument is a string literal.
@@ -3497,7 +3274,7 @@ static bool SemaBuiltinCpuSupports(Sema &S, CallExpr *TheCall) {
// Check the contents of the string.
StringRef Feature =
cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
- if (!S.Context.getTargetInfo().validateCpuSupports(Feature))
+ if (!TI.validateCpuSupports(Feature))
return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_supports)
<< Arg->getSourceRange();
return false;
@@ -3506,7 +3283,7 @@ static bool SemaBuiltinCpuSupports(Sema &S, CallExpr *TheCall) {
/// SemaBuiltinCpuIs - Handle __builtin_cpu_is(char *).
/// This checks that the target supports __builtin_cpu_is and
/// that the string argument is constant and valid.
-static bool SemaBuiltinCpuIs(Sema &S, CallExpr *TheCall) {
+static bool SemaBuiltinCpuIs(Sema &S, const TargetInfo &TI, CallExpr *TheCall) {
Expr *Arg = TheCall->getArg(0);
// Check if the argument is a string literal.
@@ -3517,7 +3294,7 @@ static bool SemaBuiltinCpuIs(Sema &S, CallExpr *TheCall) {
// Check the contents of the string.
StringRef Feature =
cast<StringLiteral>(Arg->IgnoreParenImpCasts())->getString();
- if (!S.Context.getTargetInfo().validateCpuIs(Feature))
+ if (!TI.validateCpuIs(Feature))
return S.Diag(TheCall->getBeginLoc(), diag::err_invalid_cpu_is)
<< Arg->getSourceRange();
return false;
@@ -3831,6 +3608,64 @@ bool Sema::CheckX86BuiltinGatherScatterScale(unsigned BuiltinID,
<< Arg->getSourceRange();
}
+enum { TileRegLow = 0, TileRegHigh = 7 };
+
+bool Sema::CheckX86BuiltinTileArgumentsRange(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+ for (int ArgNum : ArgNums) {
+ if (SemaBuiltinConstantArgRange(TheCall, ArgNum, TileRegLow, TileRegHigh))
+ return true;
+ }
+ return false;
+}
+
+bool Sema::CheckX86BuiltinTileArgumentsRange(CallExpr *TheCall, int ArgNum) {
+ return SemaBuiltinConstantArgRange(TheCall, ArgNum, TileRegLow, TileRegHigh);
+}
+
+bool Sema::CheckX86BuiltinTileDuplicate(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+ // Because the max number of tile register is TileRegHigh + 1, so here we use
+ // each bit to represent the usage of them in bitset.
+ std::bitset<TileRegHigh + 1> ArgValues;
+ for (int ArgNum : ArgNums) {
+ llvm::APSInt Arg;
+ SemaBuiltinConstantArg(TheCall, ArgNum, Arg);
+ int ArgExtValue = Arg.getExtValue();
+ assert((ArgExtValue >= TileRegLow || ArgExtValue <= TileRegHigh) &&
+ "Incorrect tile register num.");
+ if (ArgValues.test(ArgExtValue))
+ return Diag(TheCall->getBeginLoc(),
+ diag::err_x86_builtin_tile_arg_duplicate)
+ << TheCall->getArg(ArgNum)->getSourceRange();
+ ArgValues.set(ArgExtValue);
+ }
+ return false;
+}
+
+bool Sema::CheckX86BuiltinTileRangeAndDuplicate(CallExpr *TheCall,
+ ArrayRef<int> ArgNums) {
+ return CheckX86BuiltinTileArgumentsRange(TheCall, ArgNums) ||
+ CheckX86BuiltinTileDuplicate(TheCall, ArgNums);
+}
+
+bool Sema::CheckX86BuiltinTileArguments(unsigned BuiltinID, CallExpr *TheCall) {
+ switch (BuiltinID) {
+ default:
+ return false;
+ case X86::BI__builtin_ia32_tileloadd64:
+ case X86::BI__builtin_ia32_tileloaddt164:
+ case X86::BI__builtin_ia32_tilestored64:
+ case X86::BI__builtin_ia32_tilezero:
+ return CheckX86BuiltinTileArgumentsRange(TheCall, 0);
+ case X86::BI__builtin_ia32_tdpbssd:
+ case X86::BI__builtin_ia32_tdpbsud:
+ case X86::BI__builtin_ia32_tdpbusd:
+ case X86::BI__builtin_ia32_tdpbuud:
+ case X86::BI__builtin_ia32_tdpbf16ps:
+ return CheckX86BuiltinTileRangeAndDuplicate(TheCall, {0, 1, 2});
+ }
+}
static bool isX86_32Builtin(unsigned BuiltinID) {
// These builtins only work on x86-32 targets.
switch (BuiltinID) {
@@ -3842,15 +3677,16 @@ static bool isX86_32Builtin(unsigned BuiltinID) {
return false;
}
-bool Sema::CheckX86BuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
+bool Sema::CheckX86BuiltinFunctionCall(const TargetInfo &TI, unsigned BuiltinID,
+ CallExpr *TheCall) {
if (BuiltinID == X86::BI__builtin_cpu_supports)
- return SemaBuiltinCpuSupports(*this, TheCall);
+ return SemaBuiltinCpuSupports(*this, TI, TheCall);
if (BuiltinID == X86::BI__builtin_cpu_is)
- return SemaBuiltinCpuIs(*this, TheCall);
+ return SemaBuiltinCpuIs(*this, TI, TheCall);
// Check for 32-bit only builtins on a 64-bit target.
- const llvm::Triple &TT = Context.getTargetInfo().getTriple();
+ const llvm::Triple &TT = TI.getTriple();
if (TT.getArch() != llvm::Triple::x86 && isX86_32Builtin(BuiltinID))
return Diag(TheCall->getCallee()->getBeginLoc(),
diag::err_32_bit_builtin_64_bit_tgt);
@@ -3863,6 +3699,10 @@ bool Sema::CheckX86BuiltinFunctionCall(unsigned BuiltinID, CallExpr *TheCall) {
if (CheckX86BuiltinGatherScatterScale(BuiltinID, TheCall))
return true;
+ // If the intrinsic has a tile arguments, make sure they are valid.
+ if (CheckX86BuiltinTileArguments(BuiltinID, TheCall))
+ return true;
+
// For intrinsics which take an immediate value as part of the instruction,
// range check them here.
int i = 0, l = 0, u = 0;
@@ -4473,6 +4313,24 @@ void Sema::checkCall(NamedDecl *FDecl, const FunctionProtoType *Proto,
}
}
+ if (FDecl && FDecl->hasAttr<AllocAlignAttr>()) {
+ auto *AA = FDecl->getAttr<AllocAlignAttr>();
+ const Expr *Arg = Args[AA->getParamIndex().getASTIndex()];
+ if (!Arg->isValueDependent()) {
+ Expr::EvalResult Align;
+ if (Arg->EvaluateAsInt(Align, Context)) {
+ const llvm::APSInt &I = Align.Val.getInt();
+ if (!I.isPowerOf2())
+ Diag(Arg->getExprLoc(), diag::warn_alignment_not_power_of_two)
+ << Arg->getSourceRange();
+
+ if (I > Sema::MaximumAlignment)
+ Diag(Arg->getExprLoc(), diag::warn_assume_aligned_too_great)
+ << Arg->getSourceRange() << Sema::MaximumAlignment;
+ }
+ }
+ }
+
if (FD)
diagnoseArgDependentDiagnoseIfAttrs(FD, ThisArg, Args, Loc);
}
@@ -5491,6 +5349,15 @@ Sema::SemaBuiltinAtomicOverloaded(ExprResult TheCallResult) {
// gracefully.
TheCall->setType(ResultType);
+ // Prohibit use of _ExtInt with atomic builtins.
+ // The arguments would have already been converted to the first argument's
+ // type, so only need to check the first argument.
+ const auto *ExtIntValType = ValType->getAs<ExtIntType>();
+ if (ExtIntValType && !llvm::isPowerOf2_64(ExtIntValType->getNumBits())) {
+ Diag(FirstArg->getExprLoc(), diag::err_atomic_builtin_ext_int_size);
+ return ExprError();
+ }
+
return TheCallResult;
}
@@ -6193,11 +6060,9 @@ bool Sema::SemaBuiltinAssumeAligned(CallExpr *TheCall) {
return Diag(TheCall->getBeginLoc(), diag::err_alignment_not_power_of_two)
<< Arg->getSourceRange();
- // Alignment calculations can wrap around if it's greater than 2**29.
- unsigned MaximumAlignment = 536870912;
- if (Result > MaximumAlignment)
+ if (Result > Sema::MaximumAlignment)
Diag(TheCall->getBeginLoc(), diag::warn_assume_aligned_too_great)
- << Arg->getSourceRange() << MaximumAlignment;
+ << Arg->getSourceRange() << Sema::MaximumAlignment;
}
if (NumArgs > 2) {
@@ -6412,7 +6277,8 @@ static bool IsShiftedByte(llvm::APSInt Value) {
/// SemaBuiltinConstantArgShiftedByte - Check if argument ArgNum of TheCall is
/// a constant expression representing an arbitrary byte value shifted left by
/// a multiple of 8 bits.
-bool Sema::SemaBuiltinConstantArgShiftedByte(CallExpr *TheCall, int ArgNum) {
+bool Sema::SemaBuiltinConstantArgShiftedByte(CallExpr *TheCall, int ArgNum,
+ unsigned ArgBits) {
llvm::APSInt Result;
// We can't check the value of a dependent argument.
@@ -6424,6 +6290,10 @@ bool Sema::SemaBuiltinConstantArgShiftedByte(CallExpr *TheCall, int ArgNum) {
if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
return true;
+ // Truncate to the given size.
+ Result = Result.getLoBits(ArgBits);
+ Result.setIsUnsigned(true);
+
if (IsShiftedByte(Result))
return false;
@@ -6437,7 +6307,8 @@ bool Sema::SemaBuiltinConstantArgShiftedByte(CallExpr *TheCall, int ArgNum) {
/// 0x00FF, 0x01FF, ..., 0xFFFF). This strange range check is needed for some
/// Arm MVE intrinsics.
bool Sema::SemaBuiltinConstantArgShiftedByteOrXXFF(CallExpr *TheCall,
- int ArgNum) {
+ int ArgNum,
+ unsigned ArgBits) {
llvm::APSInt Result;
// We can't check the value of a dependent argument.
@@ -6449,6 +6320,10 @@ bool Sema::SemaBuiltinConstantArgShiftedByteOrXXFF(CallExpr *TheCall,
if (SemaBuiltinConstantArg(TheCall, ArgNum, Result))
return true;
+ // Truncate to the given size.
+ Result = Result.getLoBits(ArgBits);
+ Result.setIsUnsigned(true);
+
// Check to see if it's in either of the required forms.
if (IsShiftedByte(Result) ||
(Result > 0 && Result < 0x10000 && (Result & 0xFF) == 0xFF))
@@ -10228,6 +10103,9 @@ struct IntRange {
false/*NonNegative*/);
}
+ if (const auto *EIT = dyn_cast<ExtIntType>(T))
+ return IntRange(EIT->getNumBits(), EIT->isUnsigned());
+
const BuiltinType *BT = cast<BuiltinType>(T);
assert(BT->isInteger());
@@ -10251,6 +10129,9 @@ struct IntRange {
if (const EnumType *ET = dyn_cast<EnumType>(T))
T = C.getCanonicalType(ET->getDecl()->getIntegerType()).getTypePtr();
+ if (const auto *EIT = dyn_cast<ExtIntType>(T))
+ return IntRange(EIT->getNumBits(), EIT->isUnsigned());
+
const BuiltinType *BT = cast<BuiltinType>(T);
assert(BT->isInteger());
@@ -12064,27 +11945,31 @@ static void CheckImplicitConversion(Sema &S, Expr *E, QualType T,
}
}
-static void CheckConditionalOperator(Sema &S, ConditionalOperator *E,
+static void CheckConditionalOperator(Sema &S, AbstractConditionalOperator *E,
SourceLocation CC, QualType T);
static void CheckConditionalOperand(Sema &S, Expr *E, QualType T,
SourceLocation CC, bool &ICContext) {
E = E->IgnoreParenImpCasts();
- if (isa<ConditionalOperator>(E))
- return CheckConditionalOperator(S, cast<ConditionalOperator>(E), CC, T);
+ if (auto *CO = dyn_cast<AbstractConditionalOperator>(E))
+ return CheckConditionalOperator(S, CO, CC, T);
AnalyzeImplicitConversions(S, E, CC);
if (E->getType() != T)
return CheckImplicitConversion(S, E, T, CC, &ICContext);
}
-static void CheckConditionalOperator(Sema &S, ConditionalOperator *E,
+static void CheckConditionalOperator(Sema &S, AbstractConditionalOperator *E,
SourceLocation CC, QualType T) {
AnalyzeImplicitConversions(S, E->getCond(), E->getQuestionLoc());
+ Expr *TrueExpr = E->getTrueExpr();
+ if (auto *BCO = dyn_cast<BinaryConditionalOperator>(E))
+ TrueExpr = BCO->getCommon();
+
bool Suspicious = false;
- CheckConditionalOperand(S, E->getTrueExpr(), T, CC, Suspicious);
+ CheckConditionalOperand(S, TrueExpr, T, CC, Suspicious);
CheckConditionalOperand(S, E->getFalseExpr(), T, CC, Suspicious);
if (T->isBooleanType())
@@ -12103,7 +11988,7 @@ static void CheckConditionalOperator(Sema &S, ConditionalOperator *E,
if (E->getType() == T) return;
Suspicious = false;
- CheckImplicitConversion(S, E->getTrueExpr()->IgnoreParenImpCasts(),
+ CheckImplicitConversion(S, TrueExpr->IgnoreParenImpCasts(),
E->getType(), CC, &Suspicious);
if (!Suspicious)
CheckImplicitConversion(S, E->getFalseExpr()->IgnoreParenImpCasts(),
@@ -12120,24 +12005,44 @@ static void CheckBoolLikeConversion(Sema &S, Expr *E, SourceLocation CC) {
CheckImplicitConversion(S, E->IgnoreParenImpCasts(), S.Context.BoolTy, CC);
}
-/// AnalyzeImplicitConversions - Find and report any interesting
-/// implicit conversions in the given expression. There are a couple
-/// of competing diagnostics here, -Wconversion and -Wsign-compare.
-static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
- bool IsListInit/*= false*/) {
+namespace {
+struct AnalyzeImplicitConversionsWorkItem {
+ Expr *E;
+ SourceLocation CC;
+ bool IsListInit;
+};
+}
+
+/// Data recursive variant of AnalyzeImplicitConversions. Subexpressions
+/// that should be visited are added to WorkList.
+static void AnalyzeImplicitConversions(
+ Sema &S, AnalyzeImplicitConversionsWorkItem Item,
+ llvm::SmallVectorImpl<AnalyzeImplicitConversionsWorkItem> &WorkList) {
+ Expr *OrigE = Item.E;
+ SourceLocation CC = Item.CC;
+
QualType T = OrigE->getType();
Expr *E = OrigE->IgnoreParenImpCasts();
// Propagate whether we are in a C++ list initialization expression.
// If so, we do not issue warnings for implicit int-float conversion
// precision loss, because C++11 narrowing already handles it.
- IsListInit =
- IsListInit || (isa<InitListExpr>(OrigE) && S.getLangOpts().CPlusPlus);
+ bool IsListInit = Item.IsListInit ||
+ (isa<InitListExpr>(OrigE) && S.getLangOpts().CPlusPlus);
if (E->isTypeDependent() || E->isValueDependent())
return;
- if (const auto *UO = dyn_cast<UnaryOperator>(E))
+ Expr *SourceExpr = E;
+ // Examine, but don't traverse into the source expression of an
+ // OpaqueValueExpr, since it may have multiple parents and we don't want to
+ // emit duplicate diagnostics. Its fine to examine the form or attempt to
+ // evaluate it in the context of checking the specific conversion to T though.
+ if (auto *OVE = dyn_cast<OpaqueValueExpr>(E))
+ if (auto *Src = OVE->getSourceExpr())
+ SourceExpr = Src;
+
+ if (const auto *UO = dyn_cast<UnaryOperator>(SourceExpr))
if (UO->getOpcode() == UO_Not &&
UO->getSubExpr()->isKnownToHaveBooleanValue())
S.Diag(UO->getBeginLoc(), diag::warn_bitwise_negation_bool)
@@ -12146,21 +12051,20 @@ static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
// For conditional operators, we analyze the arguments as if they
// were being fed directly into the output.
- if (isa<ConditionalOperator>(E)) {
- ConditionalOperator *CO = cast<ConditionalOperator>(E);
+ if (auto *CO = dyn_cast<AbstractConditionalOperator>(SourceExpr)) {
CheckConditionalOperator(S, CO, CC, T);
return;
}
// Check implicit argument conversions for function calls.
- if (CallExpr *Call = dyn_cast<CallExpr>(E))
+ if (CallExpr *Call = dyn_cast<CallExpr>(SourceExpr))
CheckImplicitArgumentConversions(S, Call, CC);
// Go ahead and check any implicit conversions we might have skipped.
// The non-canonical typecheck is just an optimization;
// CheckImplicitConversion will filter out dead implicit conversions.
- if (E->getType() != T)
- CheckImplicitConversion(S, E, T, CC, nullptr, IsListInit);
+ if (SourceExpr->getType() != T)
+ CheckImplicitConversion(S, SourceExpr, T, CC, nullptr, IsListInit);
// Now continue drilling into this expression.
@@ -12170,7 +12074,7 @@ static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
// FIXME: Use a more uniform representation for this.
for (auto *SE : POE->semantics())
if (auto *OVE = dyn_cast<OpaqueValueExpr>(SE))
- AnalyzeImplicitConversions(S, OVE->getSourceExpr(), CC, IsListInit);
+ WorkList.push_back({OVE->getSourceExpr(), CC, IsListInit});
}
// Skip past explicit casts.
@@ -12178,7 +12082,8 @@ static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
E = CE->getSubExpr()->IgnoreParenImpCasts();
if (!CE->getType()->isVoidType() && E->getType()->isAtomicType())
S.Diag(E->getBeginLoc(), diag::warn_atomic_implicit_seq_cst);
- return AnalyzeImplicitConversions(S, E, CC, IsListInit);
+ WorkList.push_back({E, CC, IsListInit});
+ return;
}
if (BinaryOperator *BO = dyn_cast<BinaryOperator>(E)) {
@@ -12217,7 +12122,7 @@ static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
// Ignore checking string literals that are in logical and operators.
// This is a common pattern for asserts.
continue;
- AnalyzeImplicitConversions(S, ChildExpr, CC, IsListInit);
+ WorkList.push_back({ChildExpr, CC, IsListInit});
}
if (BO && BO->isLogicalOp()) {
@@ -12241,6 +12146,17 @@ static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
}
}
+/// AnalyzeImplicitConversions - Find and report any interesting
+/// implicit conversions in the given expression. There are a couple
+/// of competing diagnostics here, -Wconversion and -Wsign-compare.
+static void AnalyzeImplicitConversions(Sema &S, Expr *OrigE, SourceLocation CC,
+ bool IsListInit/*= false*/) {
+ llvm::SmallVector<AnalyzeImplicitConversionsWorkItem, 16> WorkList;
+ WorkList.push_back({OrigE, CC, IsListInit});
+ while (!WorkList.empty())
+ AnalyzeImplicitConversions(S, WorkList.pop_back_val(), WorkList);
+}
+
/// Diagnose integer type and any valid implicit conversion to it.
static bool checkOpenCLEnqueueIntType(Sema &S, Expr *E, const QualType &IntT) {
// Taking into account implicit conversions,
@@ -13182,6 +13098,11 @@ public:
}
void VisitCallExpr(const CallExpr *CE) {
+ // FIXME: CXXNewExpr and CXXDeleteExpr implicitly call functions.
+
+ if (CE->isUnevaluatedBuiltinCall(Context))
+ return;
+
// C++11 [intro.execution]p15:
// When calling a function [...], every value computation and side effect
// associated with any argument expression, or with the postfix expression
@@ -13189,10 +13110,165 @@ public:
// expression or statement in the body of the function [and thus before
// the value computation of its result].
SequencedSubexpression Sequenced(*this);
- SemaRef.runWithSufficientStackSpace(CE->getExprLoc(),
- [&] { Base::VisitCallExpr(CE); });
+ SemaRef.runWithSufficientStackSpace(CE->getExprLoc(), [&] {
+ // C++17 [expr.call]p5
+ // The postfix-expression is sequenced before each expression in the
+ // expression-list and any default argument. [...]
+ SequenceTree::Seq CalleeRegion;
+ SequenceTree::Seq OtherRegion;
+ if (SemaRef.getLangOpts().CPlusPlus17) {
+ CalleeRegion = Tree.allocate(Region);
+ OtherRegion = Tree.allocate(Region);
+ } else {
+ CalleeRegion = Region;
+ OtherRegion = Region;
+ }
+ SequenceTree::Seq OldRegion = Region;
- // FIXME: CXXNewExpr and CXXDeleteExpr implicitly call functions.
+ // Visit the callee expression first.
+ Region = CalleeRegion;
+ if (SemaRef.getLangOpts().CPlusPlus17) {
+ SequencedSubexpression Sequenced(*this);
+ Visit(CE->getCallee());
+ } else {
+ Visit(CE->getCallee());
+ }
+
+ // Then visit the argument expressions.
+ Region = OtherRegion;
+ for (const Expr *Argument : CE->arguments())
+ Visit(Argument);
+
+ Region = OldRegion;
+ if (SemaRef.getLangOpts().CPlusPlus17) {
+ Tree.merge(CalleeRegion);
+ Tree.merge(OtherRegion);
+ }
+ });
+ }
+
+ void VisitCXXOperatorCallExpr(const CXXOperatorCallExpr *CXXOCE) {
+ // C++17 [over.match.oper]p2:
+ // [...] the operator notation is first transformed to the equivalent
+ // function-call notation as summarized in Table 12 (where @ denotes one
+ // of the operators covered in the specified subclause). However, the
+ // operands are sequenced in the order prescribed for the built-in
+ // operator (Clause 8).
+ //
+ // From the above only overloaded binary operators and overloaded call
+ // operators have sequencing rules in C++17 that we need to handle
+ // separately.
+ if (!SemaRef.getLangOpts().CPlusPlus17 ||
+ (CXXOCE->getNumArgs() != 2 && CXXOCE->getOperator() != OO_Call))
+ return VisitCallExpr(CXXOCE);
+
+ enum {
+ NoSequencing,
+ LHSBeforeRHS,
+ RHSBeforeLHS,
+ LHSBeforeRest
+ } SequencingKind;
+ switch (CXXOCE->getOperator()) {
+ case OO_Equal:
+ case OO_PlusEqual:
+ case OO_MinusEqual:
+ case OO_StarEqual:
+ case OO_SlashEqual:
+ case OO_PercentEqual:
+ case OO_CaretEqual:
+ case OO_AmpEqual:
+ case OO_PipeEqual:
+ case OO_LessLessEqual:
+ case OO_GreaterGreaterEqual:
+ SequencingKind = RHSBeforeLHS;
+ break;
+
+ case OO_LessLess:
+ case OO_GreaterGreater:
+ case OO_AmpAmp:
+ case OO_PipePipe:
+ case OO_Comma:
+ case OO_ArrowStar:
+ case OO_Subscript:
+ SequencingKind = LHSBeforeRHS;
+ break;
+
+ case OO_Call:
+ SequencingKind = LHSBeforeRest;
+ break;
+
+ default:
+ SequencingKind = NoSequencing;
+ break;
+ }
+
+ if (SequencingKind == NoSequencing)
+ return VisitCallExpr(CXXOCE);
+
+ // This is a call, so all subexpressions are sequenced before the result.
+ SequencedSubexpression Sequenced(*this);
+
+ SemaRef.runWithSufficientStackSpace(CXXOCE->getExprLoc(), [&] {
+ assert(SemaRef.getLangOpts().CPlusPlus17 &&
+ "Should only get there with C++17 and above!");
+ assert((CXXOCE->getNumArgs() == 2 || CXXOCE->getOperator() == OO_Call) &&
+ "Should only get there with an overloaded binary operator"
+ " or an overloaded call operator!");
+
+ if (SequencingKind == LHSBeforeRest) {
+ assert(CXXOCE->getOperator() == OO_Call &&
+ "We should only have an overloaded call operator here!");
+
+ // This is very similar to VisitCallExpr, except that we only have the
+ // C++17 case. The postfix-expression is the first argument of the
+ // CXXOperatorCallExpr. The expressions in the expression-list, if any,
+ // are in the following arguments.
+ //
+ // Note that we intentionally do not visit the callee expression since
+ // it is just a decayed reference to a function.
+ SequenceTree::Seq PostfixExprRegion = Tree.allocate(Region);
+ SequenceTree::Seq ArgsRegion = Tree.allocate(Region);
+ SequenceTree::Seq OldRegion = Region;
+
+ assert(CXXOCE->getNumArgs() >= 1 &&
+ "An overloaded call operator must have at least one argument"
+ " for the postfix-expression!");
+ const Expr *PostfixExpr = CXXOCE->getArgs()[0];
+ llvm::ArrayRef<const Expr *> Args(CXXOCE->getArgs() + 1,
+ CXXOCE->getNumArgs() - 1);
+
+ // Visit the postfix-expression first.
+ {
+ Region = PostfixExprRegion;
+ SequencedSubexpression Sequenced(*this);
+ Visit(PostfixExpr);
+ }
+
+ // Then visit the argument expressions.
+ Region = ArgsRegion;
+ for (const Expr *Arg : Args)
+ Visit(Arg);
+
+ Region = OldRegion;
+ Tree.merge(PostfixExprRegion);
+ Tree.merge(ArgsRegion);
+ } else {
+ assert(CXXOCE->getNumArgs() == 2 &&
+ "Should only have two arguments here!");
+ assert((SequencingKind == LHSBeforeRHS ||
+ SequencingKind == RHSBeforeLHS) &&
+ "Unexpected sequencing kind!");
+
+ // We do not visit the callee expression since it is just a decayed
+ // reference to a function.
+ const Expr *E1 = CXXOCE->getArg(0);
+ const Expr *E2 = CXXOCE->getArg(1);
+ if (SequencingKind == RHSBeforeLHS)
+ std::swap(E1, E2);
+
+ return VisitSequencedExpressions(E1, E2);
+ }
+ });
}
void VisitCXXConstructExpr(const CXXConstructExpr *CCE) {
@@ -13323,11 +13399,12 @@ bool Sema::CheckParmsForFunctionDef(ArrayRef<ParmVarDecl *> Parameters,
// C99 6.9.1p5: If the declarator includes a parameter type list, the
// declaration of each parameter shall include an identifier.
- if (CheckParameterNames &&
- Param->getIdentifier() == nullptr &&
- !Param->isImplicit() &&
- !getLangOpts().CPlusPlus)
- Diag(Param->getLocation(), diag::err_parameter_name_omitted);
+ if (CheckParameterNames && Param->getIdentifier() == nullptr &&
+ !Param->isImplicit() && !getLangOpts().CPlusPlus) {
+ // Diagnose this as an extension in C17 and earlier.
+ if (!getLangOpts().C2x)
+ Diag(Param->getLocation(), diag::ext_parameter_name_omitted_c2x);
+ }
// C99 6.7.5.3p12:
// If the function declarator is not part of a definition of that
@@ -13380,17 +13457,233 @@ bool Sema::CheckParmsForFunctionDef(ArrayRef<ParmVarDecl *> Parameters,
return HasInvalidParm;
}
-/// A helper function to get the alignment of a Decl referred to by DeclRefExpr
-/// or MemberExpr.
-static CharUnits getDeclAlign(Expr *E, CharUnits TypeAlign,
- ASTContext &Context) {
- if (const auto *DRE = dyn_cast<DeclRefExpr>(E))
- return Context.getDeclAlign(DRE->getDecl());
+Optional<std::pair<CharUnits, CharUnits>>
+static getBaseAlignmentAndOffsetFromPtr(const Expr *E, ASTContext &Ctx);
+
+/// Compute the alignment and offset of the base class object given the
+/// derived-to-base cast expression and the alignment and offset of the derived
+/// class object.
+static std::pair<CharUnits, CharUnits>
+getDerivedToBaseAlignmentAndOffset(const CastExpr *CE, QualType DerivedType,
+ CharUnits BaseAlignment, CharUnits Offset,
+ ASTContext &Ctx) {
+ for (auto PathI = CE->path_begin(), PathE = CE->path_end(); PathI != PathE;
+ ++PathI) {
+ const CXXBaseSpecifier *Base = *PathI;
+ const CXXRecordDecl *BaseDecl = Base->getType()->getAsCXXRecordDecl();
+ if (Base->isVirtual()) {
+ // The complete object may have a lower alignment than the non-virtual
+ // alignment of the base, in which case the base may be misaligned. Choose
+ // the smaller of the non-virtual alignment and BaseAlignment, which is a
+ // conservative lower bound of the complete object alignment.
+ CharUnits NonVirtualAlignment =
+ Ctx.getASTRecordLayout(BaseDecl).getNonVirtualAlignment();
+ BaseAlignment = std::min(BaseAlignment, NonVirtualAlignment);
+ Offset = CharUnits::Zero();
+ } else {
+ const ASTRecordLayout &RL =
+ Ctx.getASTRecordLayout(DerivedType->getAsCXXRecordDecl());
+ Offset += RL.getBaseClassOffset(BaseDecl);
+ }
+ DerivedType = Base->getType();
+ }
+
+ return std::make_pair(BaseAlignment, Offset);
+}
- if (const auto *ME = dyn_cast<MemberExpr>(E))
- return Context.getDeclAlign(ME->getMemberDecl());
+/// Compute the alignment and offset of a binary additive operator.
+static Optional<std::pair<CharUnits, CharUnits>>
+getAlignmentAndOffsetFromBinAddOrSub(const Expr *PtrE, const Expr *IntE,
+ bool IsSub, ASTContext &Ctx) {
+ QualType PointeeType = PtrE->getType()->getPointeeType();
- return TypeAlign;
+ if (!PointeeType->isConstantSizeType())
+ return llvm::None;
+
+ auto P = getBaseAlignmentAndOffsetFromPtr(PtrE, Ctx);
+
+ if (!P)
+ return llvm::None;
+
+ llvm::APSInt IdxRes;
+ CharUnits EltSize = Ctx.getTypeSizeInChars(PointeeType);
+ if (IntE->isIntegerConstantExpr(IdxRes, Ctx)) {
+ CharUnits Offset = EltSize * IdxRes.getExtValue();
+ if (IsSub)
+ Offset = -Offset;
+ return std::make_pair(P->first, P->second + Offset);
+ }
+
+ // If the integer expression isn't a constant expression, compute the lower
+ // bound of the alignment using the alignment and offset of the pointer
+ // expression and the element size.
+ return std::make_pair(
+ P->first.alignmentAtOffset(P->second).alignmentAtOffset(EltSize),
+ CharUnits::Zero());
+}
+
+/// This helper function takes an lvalue expression and returns the alignment of
+/// a VarDecl and a constant offset from the VarDecl.
+Optional<std::pair<CharUnits, CharUnits>>
+static getBaseAlignmentAndOffsetFromLValue(const Expr *E, ASTContext &Ctx) {
+ E = E->IgnoreParens();
+ switch (E->getStmtClass()) {
+ default:
+ break;
+ case Stmt::CStyleCastExprClass:
+ case Stmt::CXXStaticCastExprClass:
+ case Stmt::ImplicitCastExprClass: {
+ auto *CE = cast<CastExpr>(E);
+ const Expr *From = CE->getSubExpr();
+ switch (CE->getCastKind()) {
+ default:
+ break;
+ case CK_NoOp:
+ return getBaseAlignmentAndOffsetFromLValue(From, Ctx);
+ case CK_UncheckedDerivedToBase:
+ case CK_DerivedToBase: {
+ auto P = getBaseAlignmentAndOffsetFromLValue(From, Ctx);
+ if (!P)
+ break;
+ return getDerivedToBaseAlignmentAndOffset(CE, From->getType(), P->first,
+ P->second, Ctx);
+ }
+ }
+ break;
+ }
+ case Stmt::ArraySubscriptExprClass: {
+ auto *ASE = cast<ArraySubscriptExpr>(E);
+ return getAlignmentAndOffsetFromBinAddOrSub(ASE->getBase(), ASE->getIdx(),
+ false, Ctx);
+ }
+ case Stmt::DeclRefExprClass: {
+ if (auto *VD = dyn_cast<VarDecl>(cast<DeclRefExpr>(E)->getDecl())) {
+ // FIXME: If VD is captured by copy or is an escaping __block variable,
+ // use the alignment of VD's type.
+ if (!VD->getType()->isReferenceType())
+ return std::make_pair(Ctx.getDeclAlign(VD), CharUnits::Zero());
+ if (VD->hasInit())
+ return getBaseAlignmentAndOffsetFromLValue(VD->getInit(), Ctx);
+ }
+ break;
+ }
+ case Stmt::MemberExprClass: {
+ auto *ME = cast<MemberExpr>(E);
+ auto *FD = dyn_cast<FieldDecl>(ME->getMemberDecl());
+ if (!FD || FD->getType()->isReferenceType())
+ break;
+ Optional<std::pair<CharUnits, CharUnits>> P;
+ if (ME->isArrow())
+ P = getBaseAlignmentAndOffsetFromPtr(ME->getBase(), Ctx);
+ else
+ P = getBaseAlignmentAndOffsetFromLValue(ME->getBase(), Ctx);
+ if (!P)
+ break;
+ const ASTRecordLayout &Layout = Ctx.getASTRecordLayout(FD->getParent());
+ uint64_t Offset = Layout.getFieldOffset(FD->getFieldIndex());
+ return std::make_pair(P->first,
+ P->second + CharUnits::fromQuantity(Offset));
+ }
+ case Stmt::UnaryOperatorClass: {
+ auto *UO = cast<UnaryOperator>(E);
+ switch (UO->getOpcode()) {
+ default:
+ break;
+ case UO_Deref:
+ return getBaseAlignmentAndOffsetFromPtr(UO->getSubExpr(), Ctx);
+ }
+ break;
+ }
+ case Stmt::BinaryOperatorClass: {
+ auto *BO = cast<BinaryOperator>(E);
+ auto Opcode = BO->getOpcode();
+ switch (Opcode) {
+ default:
+ break;
+ case BO_Comma:
+ return getBaseAlignmentAndOffsetFromLValue(BO->getRHS(), Ctx);
+ }
+ break;
+ }
+ }
+ return llvm::None;
+}
+
+/// This helper function takes a pointer expression and returns the alignment of
+/// a VarDecl and a constant offset from the VarDecl.
+Optional<std::pair<CharUnits, CharUnits>>
+static getBaseAlignmentAndOffsetFromPtr(const Expr *E, ASTContext &Ctx) {
+ E = E->IgnoreParens();
+ switch (E->getStmtClass()) {
+ default:
+ break;
+ case Stmt::CStyleCastExprClass:
+ case Stmt::CXXStaticCastExprClass:
+ case Stmt::ImplicitCastExprClass: {
+ auto *CE = cast<CastExpr>(E);
+ const Expr *From = CE->getSubExpr();
+ switch (CE->getCastKind()) {
+ default:
+ break;
+ case CK_NoOp:
+ return getBaseAlignmentAndOffsetFromPtr(From, Ctx);
+ case CK_ArrayToPointerDecay:
+ return getBaseAlignmentAndOffsetFromLValue(From, Ctx);
+ case CK_UncheckedDerivedToBase:
+ case CK_DerivedToBase: {
+ auto P = getBaseAlignmentAndOffsetFromPtr(From, Ctx);
+ if (!P)
+ break;
+ return getDerivedToBaseAlignmentAndOffset(
+ CE, From->getType()->getPointeeType(), P->first, P->second, Ctx);
+ }
+ }
+ break;
+ }
+ case Stmt::CXXThisExprClass: {
+ auto *RD = E->getType()->getPointeeType()->getAsCXXRecordDecl();
+ CharUnits Alignment = Ctx.getASTRecordLayout(RD).getNonVirtualAlignment();
+ return std::make_pair(Alignment, CharUnits::Zero());
+ }
+ case Stmt::UnaryOperatorClass: {
+ auto *UO = cast<UnaryOperator>(E);
+ if (UO->getOpcode() == UO_AddrOf)
+ return getBaseAlignmentAndOffsetFromLValue(UO->getSubExpr(), Ctx);
+ break;
+ }
+ case Stmt::BinaryOperatorClass: {
+ auto *BO = cast<BinaryOperator>(E);
+ auto Opcode = BO->getOpcode();
+ switch (Opcode) {
+ default:
+ break;
+ case BO_Add:
+ case BO_Sub: {
+ const Expr *LHS = BO->getLHS(), *RHS = BO->getRHS();
+ if (Opcode == BO_Add && !RHS->getType()->isIntegralOrEnumerationType())
+ std::swap(LHS, RHS);
+ return getAlignmentAndOffsetFromBinAddOrSub(LHS, RHS, Opcode == BO_Sub,
+ Ctx);
+ }
+ case BO_Comma:
+ return getBaseAlignmentAndOffsetFromPtr(BO->getRHS(), Ctx);
+ }
+ break;
+ }
+ }
+ return llvm::None;
+}
+
+static CharUnits getPresumedAlignmentOfPointer(const Expr *E, Sema &S) {
+ // See if we can compute the alignment of a VarDecl and an offset from it.
+ Optional<std::pair<CharUnits, CharUnits>> P =
+ getBaseAlignmentAndOffsetFromPtr(E, S.Context);
+
+ if (P)
+ return P->first.alignmentAtOffset(P->second);
+
+ // If that failed, return the type's alignment.
+ return S.Context.getTypeAlignInChars(E->getType()->getPointeeType());
}
/// CheckCastAlign - Implements -Wcast-align, which warns when a
@@ -13420,21 +13713,13 @@ void Sema::CheckCastAlign(Expr *Op, QualType T, SourceRange TRange) {
if (!SrcPtr) return;
QualType SrcPointee = SrcPtr->getPointeeType();
- // Whitelist casts from cv void*. We already implicitly
- // whitelisted casts to cv void*, since they have alignment 1.
- // Also whitelist casts involving incomplete types, which implicitly
+ // Explicitly allow casts from cv void*. We already implicitly
+ // allowed casts to cv void*, since they have alignment 1.
+ // Also allow casts involving incomplete types, which implicitly
// includes 'void'.
if (SrcPointee->isIncompleteType()) return;
- CharUnits SrcAlign = Context.getTypeAlignInChars(SrcPointee);
-
- if (auto *CE = dyn_cast<CastExpr>(Op)) {
- if (CE->getCastKind() == CK_ArrayToPointerDecay)
- SrcAlign = getDeclAlign(CE->getSubExpr(), SrcAlign, Context);
- } else if (auto *UO = dyn_cast<UnaryOperator>(Op)) {
- if (UO->getOpcode() == UO_AddrOf)
- SrcAlign = getDeclAlign(UO->getSubExpr(), SrcAlign, Context);
- }
+ CharUnits SrcAlign = getPresumedAlignmentOfPointer(Op, *this);
if (SrcAlign >= DestAlign) return;
@@ -13916,7 +14201,7 @@ static bool isSetterLikeSelector(Selector sel) {
if (str.startswith("set"))
str = str.substr(3);
else if (str.startswith("add")) {
- // Specially whitelist 'addOperationWithBlock:'.
+ // Specially allow 'addOperationWithBlock:'.
if (sel.getNumArgs() == 1 && str.startswith("addOperationWithBlock"))
return false;
str = str.substr(3);
@@ -14242,12 +14527,12 @@ void Sema::checkUnsafeExprAssigns(SourceLocation Loc,
return;
unsigned Attributes = PD->getPropertyAttributes();
- if (Attributes & ObjCPropertyDecl::OBJC_PR_assign) {
+ if (Attributes & ObjCPropertyAttribute::kind_assign) {
// when 'assign' attribute was not explicitly specified
// by user, ignore it and rely on property type itself
// for lifetime info.
unsigned AsWrittenAttr = PD->getPropertyAttributesAsWritten();
- if (!(AsWrittenAttr & ObjCPropertyDecl::OBJC_PR_assign) &&
+ if (!(AsWrittenAttr & ObjCPropertyAttribute::kind_assign) &&
LHSType->isObjCRetainableType())
return;
@@ -14259,8 +14544,7 @@ void Sema::checkUnsafeExprAssigns(SourceLocation Loc,
}
RHS = cast->getSubExpr();
}
- }
- else if (Attributes & ObjCPropertyDecl::OBJC_PR_weak) {
+ } else if (Attributes & ObjCPropertyAttribute::kind_weak) {
if (checkUnsafeAssignObject(*this, Loc, Qualifiers::OCL_Weak, RHS, true))
return;
}
@@ -15045,3 +15329,259 @@ void Sema::CheckAddressOfPackedMember(Expr *rhs) {
rhs, std::bind(&Sema::AddPotentialMisalignedMembers, std::ref(*this), _1,
_2, _3, _4));
}
+
+// Perform semantic checking for __builtin_matrix_transpose(matrix): the sole
+// argument must have a ConstantMatrixType; the call's result type becomes the
+// argument's matrix type with its rows and columns swapped.
+ExprResult Sema::SemaBuiltinMatrixTranspose(CallExpr *TheCall,
+ ExprResult CallResult) {
+ if (checkArgCount(*this, TheCall, 1))
+ return ExprError();
+
+ // Decay the argument to an rvalue before inspecting its type.
+ ExprResult MatrixArg = DefaultLvalueConversion(TheCall->getArg(0));
+ if (MatrixArg.isInvalid())
+ return MatrixArg;
+ Expr *Matrix = MatrixArg.get();
+
+ auto *MType = Matrix->getType()->getAs<ConstantMatrixType>();
+ if (!MType) {
+ Diag(Matrix->getBeginLoc(), diag::err_builtin_matrix_arg);
+ return ExprError();
+ }
+
+ // Create returned matrix type by swapping rows and columns of the argument
+ // matrix type.
+ QualType ResultType = Context.getConstantMatrixType(
+ MType->getElementType(), MType->getNumColumns(), MType->getNumRows());
+
+ // Change the return type to the type of the returned matrix.
+ TheCall->setType(ResultType);
+
+ // Update call argument to use the possibly converted matrix argument.
+ TheCall->setArg(0, Matrix);
+ return CallResult;
+}
+
+// Evaluate one row/column dimension argument of a matrix builtin as an
+// integer constant expression and verify it is a valid matrix dimension
+// (within ConstantMatrixType's per-dimension maximum). On failure, emits a
+// diagnostic naming the argument via \p Name and returns an empty Optional.
+static llvm::Optional<unsigned>
+getAndVerifyMatrixDimension(Expr *Expr, StringRef Name, Sema &S) {
+ llvm::APSInt Value(64);
+ SourceLocation ErrorPos;
+ if (!Expr->isIntegerConstantExpr(Value, S.Context, &ErrorPos)) {
+ S.Diag(Expr->getBeginLoc(), diag::err_builtin_matrix_scalar_unsigned_arg)
+ << Name;
+ return {};
+ }
+ // Dimensions are validated against the matrix extension's per-dimension
+ // element limit, not just for being non-negative.
+ uint64_t Dim = Value.getZExtValue();
+ if (!ConstantMatrixType::isDimensionValid(Dim)) {
+ S.Diag(Expr->getBeginLoc(), diag::err_builtin_matrix_invalid_dimension)
+ << Name << ConstantMatrixType::getMaxElementsPerDimension();
+ return {};
+ }
+ return Dim;
+}
+
+// Perform semantic checking for __builtin_matrix_column_major_load(ptr, rows,
+// columns, stride): the pointer must point to a valid matrix element type,
+// rows/columns must be valid constant matrix dimensions, and a constant
+// stride (in elements) must be at least the row count. On success the call's
+// type is set to the resulting ConstantMatrixType.
+ExprResult Sema::SemaBuiltinMatrixColumnMajorLoad(CallExpr *TheCall,
+ ExprResult CallResult) {
+ if (!getLangOpts().MatrixTypes) {
+ Diag(TheCall->getBeginLoc(), diag::err_builtin_matrix_disabled);
+ return ExprError();
+ }
+
+ if (checkArgCount(*this, TheCall, 4))
+ return ExprError();
+
+ unsigned PtrArgIdx = 0;
+ Expr *PtrExpr = TheCall->getArg(PtrArgIdx);
+ Expr *RowsExpr = TheCall->getArg(1);
+ Expr *ColumnsExpr = TheCall->getArg(2);
+ Expr *StrideExpr = TheCall->getArg(3);
+
+ // Diagnostics for individual arguments are accumulated so the user sees
+ // all argument errors in one pass rather than one at a time.
+ bool ArgError = false;
+
+ // Check pointer argument.
+ {
+ ExprResult PtrConv = DefaultFunctionArrayLvalueConversion(PtrExpr);
+ if (PtrConv.isInvalid())
+ return PtrConv;
+ PtrExpr = PtrConv.get();
+ TheCall->setArg(0, PtrExpr);
+ // A type-dependent pointer defers all checking to instantiation time.
+ if (PtrExpr->isTypeDependent()) {
+ TheCall->setType(Context.DependentTy);
+ return TheCall;
+ }
+ }
+
+ auto *PtrTy = PtrExpr->getType()->getAs<PointerType>();
+ QualType ElementTy;
+ if (!PtrTy) {
+ Diag(PtrExpr->getBeginLoc(), diag::err_builtin_matrix_pointer_arg)
+ << PtrArgIdx + 1;
+ ArgError = true;
+ } else {
+ ElementTy = PtrTy->getPointeeType().getUnqualifiedType();
+
+ if (!ConstantMatrixType::isValidElementType(ElementTy)) {
+ Diag(PtrExpr->getBeginLoc(), diag::err_builtin_matrix_pointer_arg)
+ << PtrArgIdx + 1;
+ ArgError = true;
+ }
+ }
+
+ // Apply default Lvalue conversions and convert the expression to size_t.
+ auto ApplyArgumentConversions = [this](Expr *E) {
+ ExprResult Conv = DefaultLvalueConversion(E);
+ if (Conv.isInvalid())
+ return Conv;
+
+ return tryConvertExprToType(Conv.get(), Context.getSizeType());
+ };
+
+ // Apply conversion to row and column expressions. A failed conversion is
+ // recorded by nulling the expression; the dimension check below is then
+ // skipped for it and the call ultimately fails via !MaybeRows/!MaybeColumns.
+ ExprResult RowsConv = ApplyArgumentConversions(RowsExpr);
+ if (!RowsConv.isInvalid()) {
+ RowsExpr = RowsConv.get();
+ TheCall->setArg(1, RowsExpr);
+ } else
+ RowsExpr = nullptr;
+
+ ExprResult ColumnsConv = ApplyArgumentConversions(ColumnsExpr);
+ if (!ColumnsConv.isInvalid()) {
+ ColumnsExpr = ColumnsConv.get();
+ TheCall->setArg(2, ColumnsExpr);
+ } else
+ ColumnsExpr = nullptr;
+
+ // If any part of the result matrix type is still pending, just use
+ // Context.DependentTy, until all parts are resolved.
+ if ((RowsExpr && RowsExpr->isTypeDependent()) ||
+ (ColumnsExpr && ColumnsExpr->isTypeDependent())) {
+ TheCall->setType(Context.DependentTy);
+ return CallResult;
+ }
+
+ // Check row and column dimensions.
+ llvm::Optional<unsigned> MaybeRows;
+ if (RowsExpr)
+ MaybeRows = getAndVerifyMatrixDimension(RowsExpr, "row", *this);
+
+ llvm::Optional<unsigned> MaybeColumns;
+ if (ColumnsExpr)
+ MaybeColumns = getAndVerifyMatrixDimension(ColumnsExpr, "column", *this);
+
+ // Check stride argument.
+ ExprResult StrideConv = ApplyArgumentConversions(StrideExpr);
+ if (StrideConv.isInvalid())
+ return ExprError();
+ StrideExpr = StrideConv.get();
+ TheCall->setArg(3, StrideExpr);
+
+ // A constant stride must be at least the row count: in a column-major
+ // layout consecutive columns start Stride elements apart. Non-constant
+ // strides cannot be checked here.
+ llvm::APSInt Value(64);
+ if (MaybeRows && StrideExpr->isIntegerConstantExpr(Value, Context)) {
+ uint64_t Stride = Value.getZExtValue();
+ if (Stride < *MaybeRows) {
+ Diag(StrideExpr->getBeginLoc(),
+ diag::err_builtin_matrix_stride_too_small);
+ ArgError = true;
+ }
+ }
+
+ if (ArgError || !MaybeRows || !MaybeColumns)
+ return ExprError();
+
+ TheCall->setType(
+ Context.getConstantMatrixType(ElementTy, *MaybeRows, *MaybeColumns));
+ return CallResult;
+}
+
+// Perform semantic checking for __builtin_matrix_column_major_store(matrix,
+// ptr, stride): the first argument must be a matrix, the pointer must point
+// to a non-const type matching the matrix's element type, and a constant
+// stride must be at least the matrix's row count.
+// NOTE(review): unlike the column-major load, there is no explicit
+// LangOpts.MatrixTypes guard here -- presumably gated by the caller; confirm.
+ExprResult Sema::SemaBuiltinMatrixColumnMajorStore(CallExpr *TheCall,
+ ExprResult CallResult) {
+ if (checkArgCount(*this, TheCall, 3))
+ return ExprError();
+
+ unsigned PtrArgIdx = 1;
+ Expr *MatrixExpr = TheCall->getArg(0);
+ Expr *PtrExpr = TheCall->getArg(PtrArgIdx);
+ Expr *StrideExpr = TheCall->getArg(2);
+
+ // Diagnostics for individual arguments are accumulated so the user sees
+ // all argument errors in one pass rather than one at a time.
+ bool ArgError = false;
+
+ {
+ ExprResult MatrixConv = DefaultLvalueConversion(MatrixExpr);
+ if (MatrixConv.isInvalid())
+ return MatrixConv;
+ MatrixExpr = MatrixConv.get();
+ TheCall->setArg(0, MatrixExpr);
+ }
+ // A type-dependent matrix defers all checking to instantiation time.
+ if (MatrixExpr->isTypeDependent()) {
+ TheCall->setType(Context.DependentTy);
+ return TheCall;
+ }
+
+ auto *MatrixTy = MatrixExpr->getType()->getAs<ConstantMatrixType>();
+ if (!MatrixTy) {
+ Diag(MatrixExpr->getBeginLoc(), diag::err_builtin_matrix_arg) << 0;
+ ArgError = true;
+ }
+
+ {
+ ExprResult PtrConv = DefaultFunctionArrayLvalueConversion(PtrExpr);
+ if (PtrConv.isInvalid())
+ return PtrConv;
+ PtrExpr = PtrConv.get();
+ TheCall->setArg(1, PtrExpr);
+ if (PtrExpr->isTypeDependent()) {
+ TheCall->setType(Context.DependentTy);
+ return TheCall;
+ }
+ }
+
+ // Check pointer argument.
+ auto *PtrTy = PtrExpr->getType()->getAs<PointerType>();
+ if (!PtrTy) {
+ Diag(PtrExpr->getBeginLoc(), diag::err_builtin_matrix_pointer_arg)
+ << PtrArgIdx + 1;
+ ArgError = true;
+ } else {
+ // Storing through a pointer-to-const is rejected up front.
+ QualType ElementTy = PtrTy->getPointeeType();
+ if (ElementTy.isConstQualified()) {
+ Diag(PtrExpr->getBeginLoc(), diag::err_builtin_matrix_store_to_const);
+ ArgError = true;
+ }
+ // The destination element type must match the matrix element type
+ // exactly (modulo qualifiers); no implicit element conversion is done.
+ ElementTy = ElementTy.getUnqualifiedType().getCanonicalType();
+ if (MatrixTy &&
+ !Context.hasSameType(ElementTy, MatrixTy->getElementType())) {
+ Diag(PtrExpr->getBeginLoc(),
+ diag::err_builtin_matrix_pointer_arg_mismatch)
+ << ElementTy << MatrixTy->getElementType();
+ ArgError = true;
+ }
+ }
+
+ // Apply default Lvalue conversions and convert the stride expression to
+ // size_t.
+ {
+ ExprResult StrideConv = DefaultLvalueConversion(StrideExpr);
+ if (StrideConv.isInvalid())
+ return StrideConv;
+
+ StrideConv = tryConvertExprToType(StrideConv.get(), Context.getSizeType());
+ if (StrideConv.isInvalid())
+ return StrideConv;
+ StrideExpr = StrideConv.get();
+ TheCall->setArg(2, StrideExpr);
+ }
+
+ // Check stride argument. A constant stride must be at least the row count:
+ // in a column-major layout consecutive columns start Stride elements apart.
+ llvm::APSInt Value(64);
+ if (MatrixTy && StrideExpr->isIntegerConstantExpr(Value, Context)) {
+ uint64_t Stride = Value.getZExtValue();
+ if (Stride < MatrixTy->getNumRows()) {
+ Diag(StrideExpr->getBeginLoc(),
+ diag::err_builtin_matrix_stride_too_small);
+ ArgError = true;
+ }
+ }
+
+ if (ArgError)
+ return ExprError();
+
+ return CallResult;
+}