Diffstat (limited to 'contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp')
-rw-r--r--  contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp  14
1 file changed, 14 insertions, 0 deletions
diff --git a/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp b/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp
index a6900b8963bb..30ef3680ae79 100644
--- a/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp
+++ b/contrib/llvm-project/llvm/lib/Target/AArch64/MCTargetDesc/AArch64AsmBackend.cpp
@@ -67,6 +67,7 @@ public:
{"fixup_aarch64_ldr_pcrel_imm19", 5, 19, PCRelFlagVal},
{"fixup_aarch64_movw", 5, 16, 0},
{"fixup_aarch64_pcrel_branch14", 5, 14, PCRelFlagVal},
+ {"fixup_aarch64_pcrel_branch16", 5, 16, PCRelFlagVal},
{"fixup_aarch64_pcrel_branch19", 5, 19, PCRelFlagVal},
{"fixup_aarch64_pcrel_branch26", 0, 26, PCRelFlagVal},
{"fixup_aarch64_pcrel_call26", 0, 26, PCRelFlagVal}};
@@ -121,6 +122,7 @@ static unsigned getFixupKindNumBytes(unsigned Kind) {
case AArch64::fixup_aarch64_movw:
case AArch64::fixup_aarch64_pcrel_branch14:
+ case AArch64::fixup_aarch64_pcrel_branch16:
case AArch64::fixup_aarch64_add_imm12:
case AArch64::fixup_aarch64_ldst_imm12_scale1:
case AArch64::fixup_aarch64_ldst_imm12_scale2:
@@ -314,6 +316,17 @@ static uint64_t adjustFixupValue(const MCFixup &Fixup, const MCValue &Target,
if (Value & 0x3)
Ctx.reportError(Fixup.getLoc(), "fixup not sufficiently aligned");
return (Value >> 2) & 0x3fff;
+ case AArch64::fixup_aarch64_pcrel_branch16:
+ // Unsigned PC-relative offset, so invert the negative immediate.
+ SignedValue = -SignedValue;
+ Value = static_cast<uint64_t>(SignedValue);
+ // Check valid 18-bit unsigned range.
+ if (SignedValue < 0 || SignedValue > ((1 << 18) - 1))
+ Ctx.reportError(Fixup.getLoc(), "fixup value out of range");
+ // Low two bits are not encoded (4-byte alignment assumed).
+ if (Value & 0b11)
+ Ctx.reportError(Fixup.getLoc(), "fixup not sufficiently aligned");
+ return (Value >> 2) & 0xffff;
case AArch64::fixup_aarch64_pcrel_branch26:
case AArch64::fixup_aarch64_pcrel_call26:
if (TheTriple.isOSBinFormatCOFF() && !IsResolved && SignedValue != 0) {
@@ -380,6 +393,7 @@ unsigned AArch64AsmBackend::getFixupKindContainereSizeInBytes(unsigned Kind) const {
case AArch64::fixup_aarch64_movw:
case AArch64::fixup_aarch64_pcrel_branch14:
+ case AArch64::fixup_aarch64_pcrel_branch16:
case AArch64::fixup_aarch64_add_imm12:
case AArch64::fixup_aarch64_ldst_imm12_scale1:
case AArch64::fixup_aarch64_ldst_imm12_scale2:
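
The added fixup_aarch64_pcrel_branch16 case in adjustFixupValue() encodes a non-positive, 4-byte-aligned PC-relative byte offset into a 16-bit instruction field: the offset is negated to an unsigned value, range-checked against 18 bits, checked for 4-byte alignment, and then shifted right by two. Below is a minimal standalone sketch of that arithmetic, assuming a hypothetical helper name (encodeBranch16) and a sentinel return value in place of the MCContext error reporting the real backend uses.

// Illustrative sketch only; mirrors the arithmetic of the new
// fixup_aarch64_pcrel_branch16 case, not the LLVM API.
#include <cassert>
#include <cstdint>
#include <cstdio>

// Returns the 16-bit field for a PC-relative byte offset, or ~0u if the
// offset cannot be encoded (positive, out of range, or misaligned).
static uint32_t encodeBranch16(int64_t SignedValue) {
  // Unsigned PC-relative offset, so invert the negative immediate.
  SignedValue = -SignedValue;
  uint64_t Value = static_cast<uint64_t>(SignedValue);
  // Check valid 18-bit unsigned range (16-bit field scaled by 4).
  if (SignedValue < 0 || SignedValue > ((1 << 18) - 1))
    return ~0u; // "fixup value out of range"
  // Low two bits are not encoded (4-byte alignment assumed).
  if (Value & 0b11)
    return ~0u; // "fixup not sufficiently aligned"
  return static_cast<uint32_t>((Value >> 2) & 0xffff);
}

int main() {
  assert(encodeBranch16(0) == 0);            // zero offset encodes as 0
  assert(encodeBranch16(-4) == 1);           // -4 bytes -> field value 1
  assert(encodeBranch16(-8) == 2);           // -8 bytes -> field value 2
  assert(encodeBranch16(4) == ~0u);          // positive offsets rejected
  assert(encodeBranch16(-6) == ~0u);         // misaligned offset rejected
  assert(encodeBranch16(-(1 << 19)) == ~0u); // beyond the 18-bit range
  std::printf("all branch16 encoding checks passed\n");
  return 0;
}

Built with any C++14 compiler, the asserts confirm that an offset of -8 bytes encodes as field value 2 and that positive, out-of-range, or misaligned offsets are rejected; the largest encodable offset under this scheme is -((1 << 18) - 4) bytes, i.e. the maximum 16-bit field value times the 4-byte scale.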