Diffstat (limited to 'llvm/lib/Target')
-rw-r--r--  llvm/lib/Target/AArch64/MCTargetDesc/AArch64WinCOFFObjectWriter.cpp | 28
-rw-r--r--  llvm/lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp              | 16
-rw-r--r--  llvm/lib/Target/PowerPC/PPCInstrInfo.cpp                            |  9
-rw-r--r--  llvm/lib/Target/PowerPC/PPCInstrInfo.h                              | 14
4 files changed, 57 insertions, 10 deletions
diff --git a/llvm/lib/Target/AArch64/MCTargetDesc/AArch64WinCOFFObjectWriter.cpp b/llvm/lib/Target/AArch64/MCTargetDesc/AArch64WinCOFFObjectWriter.cpp
index aa50bd05cb71..aaadc8dc1b60 100644
--- a/llvm/lib/Target/AArch64/MCTargetDesc/AArch64WinCOFFObjectWriter.cpp
+++ b/llvm/lib/Target/AArch64/MCTargetDesc/AArch64WinCOFFObjectWriter.cpp
@@ -11,6 +11,7 @@
#include "llvm/ADT/Twine.h"
#include "llvm/BinaryFormat/COFF.h"
#include "llvm/MC/MCAsmBackend.h"
+#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCExpr.h"
#include "llvm/MC/MCFixup.h"
#include "llvm/MC/MCFixupKindInfo.h"
@@ -48,10 +49,33 @@ unsigned AArch64WinCOFFObjectWriter::getRelocType(
: Target.getSymA()->getKind();
  const MCExpr *Expr = Fixup.getValue();
+  if (const AArch64MCExpr *A64E = dyn_cast<AArch64MCExpr>(Expr)) {
+    AArch64MCExpr::VariantKind RefKind = A64E->getKind();
+    switch (AArch64MCExpr::getSymbolLoc(RefKind)) {
+    case AArch64MCExpr::VK_ABS:
+    case AArch64MCExpr::VK_SECREL:
+      // Supported
+      break;
+    default:
+      Ctx.reportError(Fixup.getLoc(), "relocation variant " +
+                                          A64E->getVariantKindName() +
+                                          " unsupported on COFF targets");
+      return COFF::IMAGE_REL_ARM64_ABSOLUTE; // Dummy return value
+    }
+  }
+
  switch (static_cast<unsigned>(Fixup.getKind())) {
  default: {
-    const MCFixupKindInfo &Info = MAB.getFixupKindInfo(Fixup.getKind());
-    report_fatal_error(Twine("unsupported relocation type: ") + Info.Name);
+    if (const AArch64MCExpr *A64E = dyn_cast<AArch64MCExpr>(Expr)) {
+      Ctx.reportError(Fixup.getLoc(), "relocation type " +
+                                          A64E->getVariantKindName() +
+                                          " unsupported on COFF targets");
+    } else {
+      const MCFixupKindInfo &Info = MAB.getFixupKindInfo(Fixup.getKind());
+      Ctx.reportError(Fixup.getLoc(), Twine("relocation type ") + Info.Name +
+                                          " unsupported on COFF targets");
+    }
+    return COFF::IMAGE_REL_ARM64_ABSOLUTE; // Dummy return value
  }
  case FK_Data_4:
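
The change above replaces report_fatal_error with a recoverable diagnostic: the object writer reports through MCContext and hands back a placeholder relocation, so emission keeps going and every bad fixup in a file is surfaced in one run. A minimal standalone sketch of that report-and-continue shape, using hypothetical types rather than the real MC interfaces:

#include <cstdio>
#include <string>

// Hypothetical stand-ins for MCContext and the COFF relocation codes.
struct DiagContext {
  int ErrorCount = 0;
  void reportError(const std::string &Msg) {
    ++ErrorCount;
    std::fprintf(stderr, "error: %s\n", Msg.c_str());
  }
};

enum RelocType { REL_ABSOLUTE = 0, REL_SECREL = 1 };
enum VariantKind { VK_ABS, VK_SECREL, VK_GOT };

// Unsupported variants produce a diagnostic and a dummy relocation instead of
// aborting the whole assembly run.
RelocType getRelocTypeSketch(DiagContext &Ctx, VariantKind Kind) {
  switch (Kind) {
  case VK_ABS:
    return REL_ABSOLUTE;
  case VK_SECREL:
    return REL_SECREL;
  default:
    Ctx.reportError("relocation variant unsupported on COFF targets");
    return REL_ABSOLUTE; // dummy value; the caller checks ErrorCount later
  }
}

int main() {
  DiagContext Ctx;
  getRelocTypeSketch(Ctx, VK_GOT);    // diagnosed, but does not abort
  getRelocTypeSketch(Ctx, VK_SECREL); // later fixups are still processed
  return Ctx.ErrorCount ? 1 : 0;
}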
diff --git a/llvm/lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp b/llvm/lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp
index b09e92c07f9b..45f515c5115e 100644
--- a/llvm/lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp
+++ b/llvm/lib/Target/AMDGPU/AMDGPUAnnotateUniformValues.cpp
@@ -131,10 +131,20 @@ void AMDGPUAnnotateUniformValues::visitLoadInst(LoadInst &I) {
  // We're tracking up to the Function boundaries, and cannot go beyond because
  // of FunctionPass restrictions. We can ensure that memory is not clobbered
  // only for memory operations that are live in to entry points.
-  bool NotClobbered = isEntryFunc && !isClobberedInFunction(&I);
  Instruction *PtrI = dyn_cast<Instruction>(Ptr);
-  if (!PtrI && NotClobbered && isGlobalLoad(I)) {
-    if (isa<Argument>(Ptr) || isa<GlobalValue>(Ptr)) {
+
+  if (!isEntryFunc) {
+    if (PtrI)
+      setUniformMetadata(PtrI);
+    return;
+  }
+
+  bool NotClobbered = false;
+  if (PtrI)
+    NotClobbered = !isClobberedInFunction(&I);
+  else if (isa<Argument>(Ptr) || isa<GlobalValue>(Ptr)) {
+    if (isGlobalLoad(I) && !isClobberedInFunction(&I)) {
+      NotClobbered = true;
      // Lookup for the existing GEP
      if (noClobberClones.count(Ptr)) {
        PtrI = noClobberClones[Ptr];
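
The hunk reorders the checks: loads in non-entry functions get uniform metadata without any clobber analysis, while loads in entry points only count as not clobbered when the clobber check succeeds for an instruction pointer, or for an argument/global pointer feeding a global load. A rough standalone restatement of the decision visible in the hunk (hypothetical names; what the pass does with the result below the hunk is not modeled):

#include <cassert>

// Facts about one load, flattened into booleans for the sketch.
struct LoadFacts {
  bool IsEntryFunc;
  bool PtrIsInstruction;
  bool PtrIsArgOrGlobal;
  bool IsGlobalLoad;
  bool ClobberedInFunction;
};

// Computes the NotClobbered decision the hunk introduces; non-entry functions
// return early in the real code, so they never reach the clobber check.
bool computeNotClobbered(const LoadFacts &F) {
  if (!F.IsEntryFunc)
    return false; // non-entry: uniform metadata only, no clobber analysis
  if (F.PtrIsInstruction)
    return !F.ClobberedInFunction;
  if (F.PtrIsArgOrGlobal)
    return F.IsGlobalLoad && !F.ClobberedInFunction;
  return false;
}

int main() {
  // Entry function, pointer is a kernel argument feeding a global load that
  // nothing in the function clobbers: the load may be marked not clobbered.
  LoadFacts F{/*IsEntryFunc=*/true, /*PtrIsInstruction=*/false,
              /*PtrIsArgOrGlobal=*/true, /*IsGlobalLoad=*/true,
              /*ClobberedInFunction=*/false};
  assert(computeNotClobbered(F));
  return 0;
}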
diff --git a/llvm/lib/Target/PowerPC/PPCInstrInfo.cpp b/llvm/lib/Target/PowerPC/PPCInstrInfo.cpp
index 11c97210ead9..9a4c57fedac2 100644
--- a/llvm/lib/Target/PowerPC/PPCInstrInfo.cpp
+++ b/llvm/lib/Target/PowerPC/PPCInstrInfo.cpp
@@ -2655,6 +2655,15 @@ const unsigned *PPCInstrInfo::getLoadOpcodesForSpillArray() const {
void PPCInstrInfo::fixupIsDeadOrKill(MachineInstr &StartMI, MachineInstr &EndMI,
                                     unsigned RegNo) const {
+  // Conservatively clear the kill flag for the register if the instructions
+  // are in different basic blocks and the function is still in SSA form,
+  // because the kill flag may no longer be right. There is no need to bother
+  // with dead flags since defs with no uses will be handled by DCE.
+  MachineRegisterInfo &MRI = StartMI.getParent()->getParent()->getRegInfo();
+  if (MRI.isSSA() && (StartMI.getParent() != EndMI.getParent())) {
+    MRI.clearKillFlags(RegNo);
+    return;
+  }
  // Instructions between [StartMI, EndMI] should be in same basic block.
  assert((StartMI.getParent() == EndMI.getParent()) &&
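
The early-out trades precision for correctness: when the def and the last use now sit in different blocks while the function is still in SSA form, every kill flag on the register is dropped rather than recomputed. A small standalone model of that behaviour (simplified types, not the MachineRegisterInfo API):

#include <algorithm>
#include <vector>

// Simplified stand-ins for MachineInstr and MachineRegisterInfo.
struct MIModel {
  int Block = 0;
  std::vector<int> KilledRegs; // registers this instruction marks as killed
};

struct RegInfoModel {
  bool IsSSA = true;
  std::vector<MIModel *> Instrs;
  void clearKillFlags(int Reg) {
    for (MIModel *MI : Instrs)
      MI->KilledRegs.erase(
          std::remove(MI->KilledRegs.begin(), MI->KilledRegs.end(), Reg),
          MI->KilledRegs.end());
  }
};

// Mirrors the added early-out: cross-block in SSA form means "drop all kill
// flags for Reg"; the precise same-block fixup is left out of the sketch.
void fixupIsDeadOrKillSketch(RegInfoModel &MRI, MIModel &StartMI,
                             MIModel &EndMI, int Reg) {
  if (MRI.IsSSA && StartMI.Block != EndMI.Block) {
    MRI.clearKillFlags(Reg); // conservative: a stale kill flag would be a bug,
                             // a missing one only costs precision
    return;
  }
  // Same-block case: walk [StartMI, EndMI] and set exact kill/dead flags
  // (omitted here).
}

int main() {
  MIModel Def{/*Block=*/0, {}};
  MIModel Use{/*Block=*/1, {42}}; // stale kill flag for register 42
  RegInfoModel MRI;
  MRI.Instrs = {&Def, &Use};
  fixupIsDeadOrKillSketch(MRI, Def, Use, 42);
  return Use.KilledRegs.empty() ? 0 : 1; // the kill flag was cleared
}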
diff --git a/llvm/lib/Target/PowerPC/PPCInstrInfo.h b/llvm/lib/Target/PowerPC/PPCInstrInfo.h
index d98597f48340..43973c627fcf 100644
--- a/llvm/lib/Target/PowerPC/PPCInstrInfo.h
+++ b/llvm/lib/Target/PowerPC/PPCInstrInfo.h
@@ -565,14 +565,18 @@ public:
int64_t OffsetImm) const;
  /// Fixup killed/dead flag for register \p RegNo between instructions [\p
-  /// StartMI, \p EndMI]. Some PostRA transformations may violate register
-  /// killed/dead flags semantics, this function can be called to fix up. Before
-  /// calling this function,
+  /// StartMI, \p EndMI]. Some pre-RA or post-RA transformations may violate
+  /// register killed/dead flag semantics; this function can be called to fix
+  /// them up. Before calling this function,
  /// 1. Ensure that \p RegNo liveness is killed after instruction \p EndMI.
  /// 2. Ensure that there is no new definition between (\p StartMI, \p EndMI)
  ///    and possible definition for \p RegNo is \p StartMI or \p EndMI.
-  /// 3. Ensure that all instructions between [\p StartMI, \p EndMI] are in same
-  ///    basic block.
+  /// 3. We can do an accurate fixup only when all instructions between
+  ///    [\p StartMI, \p EndMI] are in the same basic block.
+  /// 4. If \p StartMI and \p EndMI are not in the same basic block, we
+  ///    conservatively clear the kill flag for all uses of \p RegNo before RA;
+  ///    after RA we assert, because without reaching-definition analysis it is
+  ///    hard to keep \p StartMI and \p EndMI correct.
  void fixupIsDeadOrKill(MachineInstr &StartMI, MachineInstr &EndMI,
                         unsigned RegNo) const;
  void replaceInstrWithLI(MachineInstr &MI, const LoadImmediateInfo &LII) const;
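
Read together with the updated contract, a call site only has to establish the first two preconditions; whether the two instructions share a block then decides between the precise same-block fixup and the conservative pre-RA clearing. A hypothetical caller fragment inside the PowerPC backend (the surrounding peephole and the helper name are assumptions, not code from this patch):

#include "PPCInstrInfo.h"
#include "llvm/CodeGen/MachineInstr.h"

using namespace llvm;

// Hypothetical helper: after the immediate defined by DefMI has been folded
// into UseMI, repair the kill/dead flags for Reg. The caller has already
// checked that Reg is killed at UseMI and that nothing between DefMI and
// UseMI redefines it, which is exactly what the contract above requires;
// DefMI and UseMI may now be in different blocks pre-RA.
static void repairKillFlagsAfterFold(const PPCInstrInfo &TII,
                                     MachineInstr &DefMI, MachineInstr &UseMI,
                                     unsigned Reg) {
  TII.fixupIsDeadOrKill(DefMI, UseMI, Reg);
}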