In fact, this default implementation should be the only implementation;
keep it virtual for now to accommodate targets that don't model flags
correctly.
Differential Revision: https://reviews.llvm.org/D30747
git-svn-id: https://llvm.org/svn/llvm-project/llvm/trunk@297980 91177308-0d34-0410-b5e6-96231b3b80d8
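
With this change, the base-class default keys off the MCID::Return and MCID::Call flags, so generic code can query tail calls without maintaining target-specific opcode lists. A minimal sketch of such a query follows; the surrounding MF variable and the NumTailCalls counter are illustrative assumptions, not part of this patch:

  // Sketch only: counts tail calls in a MachineFunction via the generic
  // TargetInstrInfo::isTailCall() query. This works as long as the target
  // marks its tail-call pseudos with both the isCall and isReturn flags.
  const TargetInstrInfo *TII = MF.getSubtarget().getInstrInfo();
  unsigned NumTailCalls = 0;
  for (const MachineBasicBlock &MBB : MF)
    for (const MachineInstr &MI : MBB)
      if (TII->isTailCall(MI))
        ++NumTailCalls;
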
return None;
}
- /// Determines whether |Inst| is a tail call instruction.
+ /// Determines whether \p Inst is a tail call instruction. Override this
+ /// method on targets that do not properly set MCID::Return and MCID::Call on
+ /// tail call instructions.
virtual bool isTailCall(const MachineInstr &Inst) const {
- return false;
+ return Inst.isReturn() && Inst.isCall();
}
/// True if the instruction is bound to the top of its basic block and no
return isUnscaledLdSt(MI.getOpcode());
}
-bool AArch64InstrInfo::isTailCall(const MachineInstr &Inst) const
-{
- switch (Inst.getOpcode()) {
- case AArch64::TCRETURNdi:
- case AArch64::TCRETURNri:
- return true;
- default:
- return false;
- }
-}
-
// Is this a candidate for ld/st merging or pairing? For example, we don't
// touch volatiles or load/stores that have a hint to avoid pair formation.
bool AArch64InstrInfo::isCandidateToMergeOrPair(MachineInstr &MI) const {
/// Return true if this is an unscaled load/store.
bool isUnscaledLdSt(MachineInstr &MI) const;
- bool isTailCall(const MachineInstr &Inst) const override;
-
static bool isPairableLdStInst(const MachineInstr &MI) {
switch (MI.getOpcode()) {
default:
return Subtarget.getFeatureBits()[ARM::HasV6KOps];
}
-bool ARMBaseInstrInfo::isTailCall(const MachineInstr &Inst) const
-{
- switch (Inst.getOpcode()) {
- case ARM::TAILJMPd:
- case ARM::TAILJMPr:
- case ARM::TCRETURNdi:
- case ARM::TCRETURNri:
- return true;
- default:
- return false;
- }
-}
-
bool ARMBaseInstrInfo::isSwiftFastImmShift(const MachineInstr *MI) const {
if (MI->getNumOperands() < 4)
return true;
getNoopForMachoTarget(NopInst);
}
- bool isTailCall(const MachineInstr &Inst) const override;
-
// Return the non-pre/post incrementing version of 'Opc'. Return 0
// if there is not such an opcode.
virtual unsigned getUnindexedOpcode(unsigned Opc) const = 0;
return makeArrayRef(TargetFlags);
}
-bool X86InstrInfo::isTailCall(const MachineInstr &Inst) const {
- switch (Inst.getOpcode()) {
- case X86::TCRETURNdi:
- case X86::TCRETURNmi:
- case X86::TCRETURNri:
- case X86::TCRETURNdi64:
- case X86::TCRETURNmi64:
- case X86::TCRETURNri64:
- case X86::TAILJMPd:
- case X86::TAILJMPm:
- case X86::TAILJMPr:
- case X86::TAILJMPd64:
- case X86::TAILJMPm64:
- case X86::TAILJMPr64:
- case X86::TAILJMPm64_REX:
- case X86::TAILJMPr64_REX:
- return true;
- default:
- return false;
- }
-}
-
namespace {
/// Create Global Base Reg pass. This initializes the PIC
/// global base register for x86-32.
ArrayRef<std::pair<unsigned, const char *>>
getSerializableDirectMachineOperandTargetFlags() const override;
- bool isTailCall(const MachineInstr &Inst) const override;
-
unsigned getOutliningBenefit(size_t SequenceSize,
size_t Occurrences,
bool CanBeTailCall) const override;