167c32a98SDimitry Andric //===---- MachineCombiner.cpp - Instcombining on SSA form machine code ----===//
267c32a98SDimitry Andric //
3e6d15924SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4e6d15924SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
5e6d15924SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
667c32a98SDimitry Andric //
767c32a98SDimitry Andric //===----------------------------------------------------------------------===//
867c32a98SDimitry Andric //
967c32a98SDimitry Andric // The machine combiner pass uses machine trace metrics to ensure the combined
1071d5a254SDimitry Andric // instructions do not lengthen the critical path or the resource depth.
1167c32a98SDimitry Andric //===----------------------------------------------------------------------===//
12dd58ef01SDimitry Andric
1367c32a98SDimitry Andric #include "llvm/ADT/DenseMap.h"
1401095a5dSDimitry Andric #include "llvm/ADT/Statistic.h"
15706b4fc4SDimitry Andric #include "llvm/Analysis/ProfileSummaryInfo.h"
16706b4fc4SDimitry Andric #include "llvm/CodeGen/LazyMachineBlockFrequencyInfo.h"
17e3b55780SDimitry Andric #include "llvm/CodeGen/MachineCombinerPattern.h"
1867c32a98SDimitry Andric #include "llvm/CodeGen/MachineDominators.h"
1967c32a98SDimitry Andric #include "llvm/CodeGen/MachineFunction.h"
2067c32a98SDimitry Andric #include "llvm/CodeGen/MachineFunctionPass.h"
2167c32a98SDimitry Andric #include "llvm/CodeGen/MachineLoopInfo.h"
2267c32a98SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h"
23706b4fc4SDimitry Andric #include "llvm/CodeGen/MachineSizeOpts.h"
2467c32a98SDimitry Andric #include "llvm/CodeGen/MachineTraceMetrics.h"
25b60736ecSDimitry Andric #include "llvm/CodeGen/RegisterClassInfo.h"
26044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
27044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h"
2867c32a98SDimitry Andric #include "llvm/CodeGen/TargetSchedule.h"
29044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetSubtargetInfo.h"
30706b4fc4SDimitry Andric #include "llvm/InitializePasses.h"
31044eb2f6SDimitry Andric #include "llvm/Support/CommandLine.h"
3267c32a98SDimitry Andric #include "llvm/Support/Debug.h"
3367c32a98SDimitry Andric #include "llvm/Support/raw_ostream.h"
3467c32a98SDimitry Andric
3567c32a98SDimitry Andric using namespace llvm;
3667c32a98SDimitry Andric
#define DEBUG_TYPE "machine-combiner"

STATISTIC(NumInstCombined, "Number of machineinst combined");

// Threshold (in instructions per basic block) above which instruction depths
// are updated incrementally instead of being fully recomputed.
static cl::opt<unsigned>
inc_threshold("machine-combiner-inc-threshold", cl::Hidden,
              cl::desc("Incremental depth computation will be used for basic "
                       "blocks with more instructions."), cl::init(500));

// Debug aid: print every instruction sequence substituted by the combiner.
static cl::opt<bool> dump_intrs("machine-combiner-dump-subst-intrs", cl::Hidden,
                                cl::desc("Dump all substituted intrs"),
                                cl::init(false));

// Pattern-order verification is costly, so it defaults to enabled only in
// EXPENSIVE_CHECKS builds; the command-line flag can override either default.
#ifdef EXPENSIVE_CHECKS
static cl::opt<bool> VerifyPatternOrder(
    "machine-combiner-verify-pattern-order", cl::Hidden,
    cl::desc(
        "Verify that the generated patterns are ordered by increasing latency"),
    cl::init(true));
#else
static cl::opt<bool> VerifyPatternOrder(
    "machine-combiner-verify-pattern-order", cl::Hidden,
    cl::desc(
        "Verify that the generated patterns are ordered by increasing latency"),
    cl::init(false));
#endif
63eb11fae6SDimitry Andric
namespace {
/// Machine-function pass that replaces instruction sequences with faster
/// alternatives suggested by the target, accepting a replacement only when
/// machine trace metrics show it does not hurt the critical path (or, for
/// some patterns, resource length / register pressure).
class MachineCombiner : public MachineFunctionPass {
  const TargetSubtargetInfo *STI = nullptr;
  const TargetInstrInfo *TII = nullptr;
  const TargetRegisterInfo *TRI = nullptr;
  MCSchedModel SchedModel;
  MachineRegisterInfo *MRI = nullptr;
  MachineLoopInfo *MLI = nullptr; // Current MachineLoopInfo
  MachineTraceMetrics *Traces = nullptr;
  MachineTraceMetrics::Ensemble *TraceEnsemble = nullptr;
  MachineBlockFrequencyInfo *MBFI = nullptr;
  ProfileSummaryInfo *PSI = nullptr;
  RegisterClassInfo RegClassInfo;

  // Wraps SchedModel with per-subtarget latency/resource queries.
  TargetSchedModel TSchedModel;

  /// True if optimizing for code size.
  bool OptSize = false;

public:
  static char ID;
  MachineCombiner() : MachineFunctionPass(ID) {
    initializeMachineCombinerPass(*PassRegistry::getPassRegistry());
  }
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool runOnMachineFunction(MachineFunction &MF) override;
  StringRef getPassName() const override { return "Machine InstCombiner"; }

private:
  // Drives the combining loop over a single basic block.
  bool combineInstructions(MachineBasicBlock *);
  // Returns the unique defining instruction of a virtual-register operand,
  // or nullptr.
  MachineInstr *getOperandDef(const MachineOperand &MO);
  // True when MI is unlikely to become a real target instruction
  // (e.g. a coalescible COPY).
  bool isTransientMI(const MachineInstr *MI);
  // Depth (in cycles) of the last instruction in InsInstrs.
  unsigned getDepth(SmallVectorImpl<MachineInstr *> &InsInstrs,
                    DenseMap<unsigned, unsigned> &InstrIdxForVirtReg,
                    MachineTraceMetrics::Trace BlockTrace,
                    const MachineBasicBlock &MBB);
  // Latency of NewRoot, using Root to find in-trace consumers.
  unsigned getLatency(MachineInstr *Root, MachineInstr *NewRoot,
                      MachineTraceMetrics::Trace BlockTrace);
  // Cost model: does the new sequence keep/shrink the critical path?
  bool improvesCriticalPathLen(MachineBasicBlock *MBB, MachineInstr *Root,
                               MachineTraceMetrics::Trace BlockTrace,
                               SmallVectorImpl<MachineInstr *> &InsInstrs,
                               SmallVectorImpl<MachineInstr *> &DelInstrs,
                               DenseMap<unsigned, unsigned> &InstrIdxForVirtReg,
                               unsigned Pattern, bool SlackIsAccurate);
  // Cost model for register-pressure-reducing patterns.
  bool reduceRegisterPressure(MachineInstr &Root, MachineBasicBlock *MBB,
                              SmallVectorImpl<MachineInstr *> &InsInstrs,
                              SmallVectorImpl<MachineInstr *> &DelInstrs,
                              unsigned Pattern);
  // True when the replacement does not increase resource length.
  bool preservesResourceLen(MachineBasicBlock *MBB,
                            MachineTraceMetrics::Trace BlockTrace,
                            SmallVectorImpl<MachineInstr *> &InsInstrs,
                            SmallVectorImpl<MachineInstr *> &DelInstrs);
  // Maps instructions to their scheduling-class descriptors.
  void instr2instrSC(SmallVectorImpl<MachineInstr *> &Instrs,
                     SmallVectorImpl<const MCSchedClassDesc *> &InstrsSC);
  // Returns {NewRootLatency, RootLatency} estimates for the two sequences.
  std::pair<unsigned, unsigned>
  getLatenciesForInstrSequences(MachineInstr &MI,
                                SmallVectorImpl<MachineInstr *> &InsInstrs,
                                SmallVectorImpl<MachineInstr *> &DelInstrs,
                                MachineTraceMetrics::Trace BlockTrace);

  void verifyPatternOrder(MachineBasicBlock *MBB, MachineInstr &Root,
                          SmallVector<unsigned, 16> &Patterns);
  CombinerObjective getCombinerObjective(unsigned Pattern);
};
} // end anonymous namespace
12967c32a98SDimitry Andric
char MachineCombiner::ID = 0;
// Exported pass identity so other code (e.g. the pass pipeline) can refer to
// this pass by address.
char &llvm::MachineCombinerID = MachineCombiner::ID;

INITIALIZE_PASS_BEGIN(MachineCombiner, DEBUG_TYPE,
                      "Machine InstCombiner", false, false)
INITIALIZE_PASS_DEPENDENCY(MachineLoopInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(MachineTraceMetrics)
INITIALIZE_PASS_END(MachineCombiner, DEBUG_TYPE, "Machine InstCombiner",
                    false, false)
13967c32a98SDimitry Andric
/// Declare the analyses this pass requires and preserves. The CFG is never
/// modified; dominators and loop info survive the instruction rewrites.
void MachineCombiner::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.setPreservesCFG();
  AU.addPreserved<MachineDominatorTreeWrapperPass>();
  AU.addRequired<MachineLoopInfoWrapperPass>();
  AU.addPreserved<MachineLoopInfoWrapperPass>();
  AU.addRequired<MachineTraceMetrics>();
  AU.addPreserved<MachineTraceMetrics>();
  // Block frequency / profile info feed the size-optimization decisions.
  AU.addRequired<LazyMachineBlockFrequencyInfoPass>();
  AU.addRequired<ProfileSummaryInfoWrapperPass>();
  MachineFunctionPass::getAnalysisUsage(AU);
}
15167c32a98SDimitry Andric
1527fa27ce4SDimitry Andric MachineInstr *
getOperandDef(const MachineOperand & MO)1537fa27ce4SDimitry Andric MachineCombiner::getOperandDef(const MachineOperand &MO) {
15467c32a98SDimitry Andric MachineInstr *DefInstr = nullptr;
15567c32a98SDimitry Andric // We need a virtual register definition.
156e3b55780SDimitry Andric if (MO.isReg() && MO.getReg().isVirtual())
15767c32a98SDimitry Andric DefInstr = MRI->getUniqueVRegDef(MO.getReg());
15867c32a98SDimitry Andric return DefInstr;
15967c32a98SDimitry Andric }
16067c32a98SDimitry Andric
1614b4fe385SDimitry Andric /// Return true if MI is unlikely to generate an actual target instruction.
isTransientMI(const MachineInstr * MI)1624b4fe385SDimitry Andric bool MachineCombiner::isTransientMI(const MachineInstr *MI) {
1634b4fe385SDimitry Andric if (!MI->isCopy())
1644b4fe385SDimitry Andric return MI->isTransient();
1654b4fe385SDimitry Andric
1664b4fe385SDimitry Andric // If MI is a COPY, check if its src and dst registers can be coalesced.
1674b4fe385SDimitry Andric Register Dst = MI->getOperand(0).getReg();
1684b4fe385SDimitry Andric Register Src = MI->getOperand(1).getReg();
1694b4fe385SDimitry Andric
1704b4fe385SDimitry Andric if (!MI->isFullCopy()) {
1714b4fe385SDimitry Andric // If src RC contains super registers of dst RC, it can also be coalesced.
1724b4fe385SDimitry Andric if (MI->getOperand(0).getSubReg() || Src.isPhysical() || Dst.isPhysical())
1734b4fe385SDimitry Andric return false;
1744b4fe385SDimitry Andric
1754b4fe385SDimitry Andric auto SrcSub = MI->getOperand(1).getSubReg();
1764b4fe385SDimitry Andric auto SrcRC = MRI->getRegClass(Src);
1774b4fe385SDimitry Andric auto DstRC = MRI->getRegClass(Dst);
1784b4fe385SDimitry Andric return TRI->getMatchingSuperRegClass(SrcRC, DstRC, SrcSub) != nullptr;
1794b4fe385SDimitry Andric }
1804b4fe385SDimitry Andric
1814b4fe385SDimitry Andric if (Src.isPhysical() && Dst.isPhysical())
1824b4fe385SDimitry Andric return Src == Dst;
1834b4fe385SDimitry Andric
1844b4fe385SDimitry Andric if (Src.isVirtual() && Dst.isVirtual()) {
1854b4fe385SDimitry Andric auto SrcRC = MRI->getRegClass(Src);
1864b4fe385SDimitry Andric auto DstRC = MRI->getRegClass(Dst);
1874b4fe385SDimitry Andric return SrcRC->hasSuperClassEq(DstRC) || SrcRC->hasSubClassEq(DstRC);
1884b4fe385SDimitry Andric }
1894b4fe385SDimitry Andric
1904b4fe385SDimitry Andric if (Src.isVirtual())
1914b4fe385SDimitry Andric std::swap(Src, Dst);
1924b4fe385SDimitry Andric
1934b4fe385SDimitry Andric // Now Src is physical register, Dst is virtual register.
1944b4fe385SDimitry Andric auto DstRC = MRI->getRegClass(Dst);
1954b4fe385SDimitry Andric return DstRC->contains(Src);
1964b4fe385SDimitry Andric }
1974b4fe385SDimitry Andric
/// Computes depth of instructions in the vector \p InsInstrs.
///
/// \param InsInstrs is a vector of machine instructions
/// \param InstrIdxForVirtReg is a dense map of virtual register to index
/// of defining machine instruction in \p InsInstrs
/// \param BlockTrace is a trace of machine instructions
/// \param MBB is the block the new instructions will be placed in
///
/// \returns Depth of last instruction in \p InsInstrs ("NewRoot")
unsigned
MachineCombiner::getDepth(SmallVectorImpl<MachineInstr *> &InsInstrs,
                          DenseMap<unsigned, unsigned> &InstrIdxForVirtReg,
                          MachineTraceMetrics::Trace BlockTrace,
                          const MachineBasicBlock &MBB) {
  SmallVector<unsigned, 16> InstrDepth;
  // For each instruction in the new sequence compute the depth based on the
  // operands. Use the trace information when possible. For new operands which
  // are tracked in the InstrIdxForVirtReg map depth is looked up in InstrDepth
  for (auto *InstrPtr : InsInstrs) { // for each Use
    unsigned IDepth = 0;
    for (const MachineOperand &MO : InstrPtr->all_uses()) {
      // Check for virtual register operand.
      if (!MO.getReg().isVirtual())
        continue;
      unsigned DepthOp = 0;
      unsigned LatencyOp = 0;
      DenseMap<unsigned, unsigned>::iterator II =
          InstrIdxForVirtReg.find(MO.getReg());
      if (II != InstrIdxForVirtReg.end()) {
        // Operand is a new virtual register not in the trace; its depth was
        // already computed when its (new) defining instruction was visited
        // earlier in this loop.
        assert(II->second < InstrDepth.size() && "Bad Index");
        MachineInstr *DefInstr = InsInstrs[II->second];
        assert(DefInstr &&
               "There must be a definition for a new virtual register");
        DepthOp = InstrDepth[II->second];
        int DefIdx =
            DefInstr->findRegisterDefOperandIdx(MO.getReg(), /*TRI=*/nullptr);
        int UseIdx =
            InstrPtr->findRegisterUseOperandIdx(MO.getReg(), /*TRI=*/nullptr);
        LatencyOp = TSchedModel.computeOperandLatency(DefInstr, DefIdx,
                                                      InstrPtr, UseIdx);
      } else {
        // Pre-existing operand: take its depth from the trace. Skip defs
        // outside MBB when the target's trace strategy is local-only.
        MachineInstr *DefInstr = getOperandDef(MO);
        if (DefInstr && (TII->getMachineCombinerTraceStrategy() !=
                             MachineTraceStrategy::TS_Local ||
                         DefInstr->getParent() == &MBB)) {
          DepthOp = BlockTrace.getInstrCycles(*DefInstr).Depth;
          // Transient defs (e.g. coalescible COPYs) add no operand latency.
          if (!isTransientMI(DefInstr))
            LatencyOp = TSchedModel.computeOperandLatency(
                DefInstr,
                DefInstr->findRegisterDefOperandIdx(MO.getReg(),
                                                    /*TRI=*/nullptr),
                InstrPtr,
                InstrPtr->findRegisterUseOperandIdx(MO.getReg(),
                                                    /*TRI=*/nullptr));
        }
      }
      IDepth = std::max(IDepth, DepthOp + LatencyOp);
    }
    InstrDepth.push_back(IDepth);
  }
  unsigned NewRootIdx = InsInstrs.size() - 1;
  return InstrDepth[NewRootIdx];
}
26167c32a98SDimitry Andric
/// Computes instruction latency as max of latency of defined operands.
///
/// \param Root is a machine instruction that could be replaced by NewRoot.
/// It is used to compute a more accurate latency information for NewRoot in
/// case there is a dependent instruction in the same trace (\p BlockTrace)
/// \param NewRoot is the instruction for which the latency is computed
/// \param BlockTrace is a trace of machine instructions
///
/// \returns Latency of \p NewRoot
unsigned MachineCombiner::getLatency(MachineInstr *Root, MachineInstr *NewRoot,
                                     MachineTraceMetrics::Trace BlockTrace) {
  // Check each definition in NewRoot and compute the latency
  unsigned NewRootLatency = 0;

  for (const MachineOperand &MO : NewRoot->all_defs()) {
    // Check for virtual register operand.
    if (!MO.getReg().isVirtual())
      continue;
    // Get the first instruction that uses MO.
    // NOTE(review): reg_begin iterates all operands referencing MO's register;
    // the ++ presumably steps past NewRoot's own def to reach a use — confirm.
    MachineRegisterInfo::reg_iterator RI = MRI->reg_begin(MO.getReg());
    RI++;
    if (RI == MRI->reg_end())
      continue;
    MachineInstr *UseMO = RI->getParent();
    unsigned LatencyOp = 0;
    if (UseMO && BlockTrace.isDepInTrace(*Root, *UseMO)) {
      // Consumer is in the trace: use the precise def->use operand latency.
      LatencyOp = TSchedModel.computeOperandLatency(
          NewRoot,
          NewRoot->findRegisterDefOperandIdx(MO.getReg(), /*TRI=*/nullptr),
          UseMO,
          UseMO->findRegisterUseOperandIdx(MO.getReg(), /*TRI=*/nullptr));
    } else {
      // Otherwise fall back to the instruction's overall latency.
      LatencyOp = TSchedModel.computeInstrLatency(NewRoot);
    }
    NewRootLatency = std::max(NewRootLatency, LatencyOp);
  }
  return NewRootLatency;
}
30067c32a98SDimitry Andric
getCombinerObjective(unsigned Pattern)301ac9a064cSDimitry Andric CombinerObjective MachineCombiner::getCombinerObjective(unsigned Pattern) {
302dd58ef01SDimitry Andric // TODO: If C++ ever gets a real enum class, make this part of the
303dd58ef01SDimitry Andric // MachineCombinerPattern class.
304ac9a064cSDimitry Andric switch (Pattern) {
305dd58ef01SDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY:
306dd58ef01SDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB:
307dd58ef01SDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY:
308dd58ef01SDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB:
309dd58ef01SDimitry Andric return CombinerObjective::MustReduceDepth;
310dd58ef01SDimitry Andric default:
311ac9a064cSDimitry Andric return TII->getCombinerObjective(Pattern);
312dd58ef01SDimitry Andric }
313dd58ef01SDimitry Andric }
314dd58ef01SDimitry Andric
315eb11fae6SDimitry Andric /// Estimate the latency of the new and original instruction sequence by summing
316eb11fae6SDimitry Andric /// up the latencies of the inserted and deleted instructions. This assumes
317eb11fae6SDimitry Andric /// that the inserted and deleted instructions are dependent instruction chains,
318eb11fae6SDimitry Andric /// which might not hold in all cases.
getLatenciesForInstrSequences(MachineInstr & MI,SmallVectorImpl<MachineInstr * > & InsInstrs,SmallVectorImpl<MachineInstr * > & DelInstrs,MachineTraceMetrics::Trace BlockTrace)319eb11fae6SDimitry Andric std::pair<unsigned, unsigned> MachineCombiner::getLatenciesForInstrSequences(
320eb11fae6SDimitry Andric MachineInstr &MI, SmallVectorImpl<MachineInstr *> &InsInstrs,
321eb11fae6SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs,
322eb11fae6SDimitry Andric MachineTraceMetrics::Trace BlockTrace) {
323eb11fae6SDimitry Andric assert(!InsInstrs.empty() && "Only support sequences that insert instrs.");
324eb11fae6SDimitry Andric unsigned NewRootLatency = 0;
325eb11fae6SDimitry Andric // NewRoot is the last instruction in the \p InsInstrs vector.
326eb11fae6SDimitry Andric MachineInstr *NewRoot = InsInstrs.back();
327eb11fae6SDimitry Andric for (unsigned i = 0; i < InsInstrs.size() - 1; i++)
328eb11fae6SDimitry Andric NewRootLatency += TSchedModel.computeInstrLatency(InsInstrs[i]);
329eb11fae6SDimitry Andric NewRootLatency += getLatency(&MI, NewRoot, BlockTrace);
330eb11fae6SDimitry Andric
331eb11fae6SDimitry Andric unsigned RootLatency = 0;
3324b4fe385SDimitry Andric for (auto *I : DelInstrs)
333eb11fae6SDimitry Andric RootLatency += TSchedModel.computeInstrLatency(I);
334eb11fae6SDimitry Andric
335eb11fae6SDimitry Andric return {NewRootLatency, RootLatency};
336eb11fae6SDimitry Andric }
337eb11fae6SDimitry Andric
reduceRegisterPressure(MachineInstr & Root,MachineBasicBlock * MBB,SmallVectorImpl<MachineInstr * > & InsInstrs,SmallVectorImpl<MachineInstr * > & DelInstrs,unsigned Pattern)338b60736ecSDimitry Andric bool MachineCombiner::reduceRegisterPressure(
339b60736ecSDimitry Andric MachineInstr &Root, MachineBasicBlock *MBB,
340b60736ecSDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs,
341ac9a064cSDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs, unsigned Pattern) {
342b60736ecSDimitry Andric // FIXME: for now, we don't do any check for the register pressure patterns.
343b60736ecSDimitry Andric // We treat them as always profitable. But we can do better if we make
344b60736ecSDimitry Andric // RegPressureTracker class be aware of TIE attribute. Then we can get an
345b60736ecSDimitry Andric // accurate compare of register pressure with DelInstrs or InsInstrs.
346b60736ecSDimitry Andric return true;
347b60736ecSDimitry Andric }
348b60736ecSDimitry Andric
/// The DAGCombine code sequence ends in MI (Machine Instruction) Root.
/// The new code sequence ends in MI NewRoot. A necessary condition for the new
/// sequence to replace the old sequence is that it cannot lengthen the critical
/// path. The definition of "improve" may be restricted by specifying that the
/// new path improves the data dependency chain (MustReduceDepth).
bool MachineCombiner::improvesCriticalPathLen(
    MachineBasicBlock *MBB, MachineInstr *Root,
    MachineTraceMetrics::Trace BlockTrace,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs,
    DenseMap<unsigned, unsigned> &InstrIdxForVirtReg, unsigned Pattern,
    bool SlackIsAccurate) {
  // Get depth and latency of NewRoot and Root.
  unsigned NewRootDepth =
      getDepth(InsInstrs, InstrIdxForVirtReg, BlockTrace, *MBB);
  unsigned RootDepth = BlockTrace.getInstrCycles(*Root).Depth;

  LLVM_DEBUG(dbgs() << "  Dependence data for " << *Root << "\tNewRootDepth: "
                    << NewRootDepth << "\tRootDepth: " << RootDepth);

  // For a transform such as reassociation, the cost equation is
  // conservatively calculated so that we must improve the depth (data
  // dependency cycles) in the critical path to proceed with the transform.
  // Being conservative also protects against inaccuracies in the underlying
  // machine trace metrics and CPU models.
  if (getCombinerObjective(Pattern) == CombinerObjective::MustReduceDepth) {
    LLVM_DEBUG(dbgs() << "\tIt MustReduceDepth ");
    LLVM_DEBUG(NewRootDepth < RootDepth
                   ? dbgs() << "\t  and it does it\n"
                   : dbgs() << "\t  but it does NOT do it\n");
    return NewRootDepth < RootDepth;
  }

  // A more flexible cost calculation for the critical path includes the slack
  // of the original code sequence. This may allow the transform to proceed
  // even if the instruction depths (data dependency cycles) become worse.

  // Account for the latency of the inserted and deleted instructions by
  // either summing the whole sequences (target opt-in) or comparing only the
  // two root instructions.
  unsigned NewRootLatency, RootLatency;
  if (TII->accumulateInstrSeqToRootLatency(*Root)) {
    std::tie(NewRootLatency, RootLatency) =
        getLatenciesForInstrSequences(*Root, InsInstrs, DelInstrs, BlockTrace);
  } else {
    NewRootLatency = TSchedModel.computeInstrLatency(InsInstrs.back());
    RootLatency = TSchedModel.computeInstrLatency(Root);
  }

  // Slack only counts toward the old sequence's budget when it is accurate.
  unsigned RootSlack = BlockTrace.getInstrSlack(*Root);
  unsigned NewCycleCount = NewRootDepth + NewRootLatency;
  unsigned OldCycleCount =
      RootDepth + RootLatency + (SlackIsAccurate ? RootSlack : 0);
  LLVM_DEBUG(dbgs() << "\n\tNewRootLatency: " << NewRootLatency
                    << "\tRootLatency: " << RootLatency << "\n\tRootSlack: "
                    << RootSlack << " SlackIsAccurate=" << SlackIsAccurate
                    << "\n\tNewRootDepth + NewRootLatency = " << NewCycleCount
                    << "\n\tRootDepth + RootLatency + RootSlack = "
                    << OldCycleCount;);
  LLVM_DEBUG(NewCycleCount <= OldCycleCount
                 ? dbgs() << "\n\t  It IMPROVES PathLen because"
                 : dbgs() << "\n\t  It DOES NOT improve PathLen because");
  LLVM_DEBUG(dbgs() << "\n\t\tNewCycleCount = " << NewCycleCount
                    << ", OldCycleCount = " << OldCycleCount << "\n");

  return NewCycleCount <= OldCycleCount;
}
41467c32a98SDimitry Andric
41567c32a98SDimitry Andric /// helper routine to convert instructions into SC
instr2instrSC(SmallVectorImpl<MachineInstr * > & Instrs,SmallVectorImpl<const MCSchedClassDesc * > & InstrsSC)41667c32a98SDimitry Andric void MachineCombiner::instr2instrSC(
41767c32a98SDimitry Andric SmallVectorImpl<MachineInstr *> &Instrs,
41867c32a98SDimitry Andric SmallVectorImpl<const MCSchedClassDesc *> &InstrsSC) {
41967c32a98SDimitry Andric for (auto *InstrPtr : Instrs) {
42067c32a98SDimitry Andric unsigned Opc = InstrPtr->getOpcode();
42167c32a98SDimitry Andric unsigned Idx = TII->get(Opc).getSchedClass();
42267c32a98SDimitry Andric const MCSchedClassDesc *SC = SchedModel.getSchedClassDesc(Idx);
42367c32a98SDimitry Andric InstrsSC.push_back(SC);
42467c32a98SDimitry Andric }
42567c32a98SDimitry Andric }
426dd58ef01SDimitry Andric
/// True when the new instructions do not increase resource length
/// (beyond any extra headroom the target grants via
/// getExtendResourceLenLimit).
bool MachineCombiner::preservesResourceLen(
    MachineBasicBlock *MBB, MachineTraceMetrics::Trace BlockTrace,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs) {
  // Without a scheduling model there is nothing to compare; be permissive.
  if (!TSchedModel.hasInstrSchedModel())
    return true;

  // Compute current resource length

  //ArrayRef<const MachineBasicBlock *> MBBarr(MBB);
  SmallVector <const MachineBasicBlock *, 1> MBBarr;
  MBBarr.push_back(MBB);
  unsigned ResLenBeforeCombine = BlockTrace.getResourceLength(MBBarr);

  // Deal with SC rather than Instructions.
  SmallVector<const MCSchedClassDesc *, 16> InsInstrsSC;
  SmallVector<const MCSchedClassDesc *, 16> DelInstrsSC;

  instr2instrSC(InsInstrs, InsInstrsSC);
  instr2instrSC(DelInstrs, DelInstrsSC);

  ArrayRef<const MCSchedClassDesc *> MSCInsArr{InsInstrsSC};
  ArrayRef<const MCSchedClassDesc *> MSCDelArr{DelInstrsSC};

  // Compute new resource length.
  unsigned ResLenAfterCombine =
      BlockTrace.getResourceLength(MBBarr, MSCInsArr, MSCDelArr);

  LLVM_DEBUG(dbgs() << "\t\tResource length before replacement: "
                    << ResLenBeforeCombine
                    << " and after: " << ResLenAfterCombine << "\n";);
  LLVM_DEBUG(
      ResLenAfterCombine <=
      ResLenBeforeCombine + TII->getExtendResourceLenLimit()
          ? dbgs() << "\t\t  As result it IMPROVES/PRESERVES Resource Length\n"
          : dbgs() << "\t\t  As result it DOES NOT improve/preserve Resource "
                      "Length\n");

  return ResLenAfterCombine <=
         ResLenBeforeCombine + TII->getExtendResourceLenLimit();
}
46967c32a98SDimitry Andric
470044eb2f6SDimitry Andric /// Inserts InsInstrs and deletes DelInstrs. Incrementally updates instruction
471044eb2f6SDimitry Andric /// depths if requested.
472044eb2f6SDimitry Andric ///
473044eb2f6SDimitry Andric /// \param MBB basic block to insert instructions in
474044eb2f6SDimitry Andric /// \param MI current machine instruction
475044eb2f6SDimitry Andric /// \param InsInstrs new instructions to insert in \p MBB
476044eb2f6SDimitry Andric /// \param DelInstrs instruction to delete from \p MBB
4777fa27ce4SDimitry Andric /// \param TraceEnsemble is a pointer to the machine trace information
478044eb2f6SDimitry Andric /// \param RegUnits set of live registers, needed to compute instruction depths
479b60736ecSDimitry Andric /// \param TII is target instruction info, used to call target hook
480b60736ecSDimitry Andric /// \param Pattern is used to call target hook finalizeInsInstrs
481044eb2f6SDimitry Andric /// \param IncrementalUpdate if true, compute instruction depths incrementally,
482044eb2f6SDimitry Andric /// otherwise invalidate the trace
483ac9a064cSDimitry Andric static void
insertDeleteInstructions(MachineBasicBlock * MBB,MachineInstr & MI,SmallVectorImpl<MachineInstr * > & InsInstrs,SmallVectorImpl<MachineInstr * > & DelInstrs,MachineTraceMetrics::Ensemble * TraceEnsemble,SparseSet<LiveRegUnit> & RegUnits,const TargetInstrInfo * TII,unsigned Pattern,bool IncrementalUpdate)484ac9a064cSDimitry Andric insertDeleteInstructions(MachineBasicBlock *MBB, MachineInstr &MI,
4857fa27ce4SDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs,
4867fa27ce4SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs,
4877fa27ce4SDimitry Andric MachineTraceMetrics::Ensemble *TraceEnsemble,
488ac9a064cSDimitry Andric SparseSet<LiveRegUnit> &RegUnits,
489ac9a064cSDimitry Andric const TargetInstrInfo *TII, unsigned Pattern,
490ac9a064cSDimitry Andric bool IncrementalUpdate) {
491b60736ecSDimitry Andric // If we want to fix up some placeholder for some target, do it now.
492b60736ecSDimitry Andric // We need this because in genAlternativeCodeSequence, we have not decided the
493b60736ecSDimitry Andric // better pattern InsInstrs or DelInstrs, so we don't want generate some
494b60736ecSDimitry Andric // sideeffect to the function. For example we need to delay the constant pool
495b60736ecSDimitry Andric // entry creation here after InsInstrs is selected as better pattern.
496b60736ecSDimitry Andric // Otherwise the constant pool entry created for InsInstrs will not be deleted
497b60736ecSDimitry Andric // even if InsInstrs is not the better pattern.
498b60736ecSDimitry Andric TII->finalizeInsInstrs(MI, Pattern, InsInstrs);
499b60736ecSDimitry Andric
50071d5a254SDimitry Andric for (auto *InstrPtr : InsInstrs)
50171d5a254SDimitry Andric MBB->insert((MachineBasicBlock::iterator)&MI, InstrPtr);
502044eb2f6SDimitry Andric
503044eb2f6SDimitry Andric for (auto *InstrPtr : DelInstrs) {
50477fc4c14SDimitry Andric InstrPtr->eraseFromParent();
505044eb2f6SDimitry Andric // Erase all LiveRegs defined by the removed instruction
5064b4fe385SDimitry Andric for (auto *I = RegUnits.begin(); I != RegUnits.end();) {
507044eb2f6SDimitry Andric if (I->MI == InstrPtr)
508044eb2f6SDimitry Andric I = RegUnits.erase(I);
509044eb2f6SDimitry Andric else
510044eb2f6SDimitry Andric I++;
511044eb2f6SDimitry Andric }
512044eb2f6SDimitry Andric }
513044eb2f6SDimitry Andric
514044eb2f6SDimitry Andric if (IncrementalUpdate)
515044eb2f6SDimitry Andric for (auto *InstrPtr : InsInstrs)
5167fa27ce4SDimitry Andric TraceEnsemble->updateDepth(MBB, *InstrPtr, RegUnits);
517044eb2f6SDimitry Andric else
5187fa27ce4SDimitry Andric TraceEnsemble->invalidate(MBB);
519044eb2f6SDimitry Andric
520044eb2f6SDimitry Andric NumInstCombined++;
52171d5a254SDimitry Andric }
52271d5a254SDimitry Andric
523eb11fae6SDimitry Andric // Check that the difference between original and new latency is decreasing for
524eb11fae6SDimitry Andric // later patterns. This helps to discover sub-optimal pattern orderings.
verifyPatternOrder(MachineBasicBlock * MBB,MachineInstr & Root,SmallVector<unsigned,16> & Patterns)525ac9a064cSDimitry Andric void MachineCombiner::verifyPatternOrder(MachineBasicBlock *MBB,
526ac9a064cSDimitry Andric MachineInstr &Root,
527ac9a064cSDimitry Andric SmallVector<unsigned, 16> &Patterns) {
528eb11fae6SDimitry Andric long PrevLatencyDiff = std::numeric_limits<long>::max();
529eb11fae6SDimitry Andric (void)PrevLatencyDiff; // Variable is used in assert only.
530eb11fae6SDimitry Andric for (auto P : Patterns) {
531eb11fae6SDimitry Andric SmallVector<MachineInstr *, 16> InsInstrs;
532eb11fae6SDimitry Andric SmallVector<MachineInstr *, 16> DelInstrs;
533eb11fae6SDimitry Andric DenseMap<unsigned, unsigned> InstrIdxForVirtReg;
534eb11fae6SDimitry Andric TII->genAlternativeCodeSequence(Root, P, InsInstrs, DelInstrs,
535eb11fae6SDimitry Andric InstrIdxForVirtReg);
536eb11fae6SDimitry Andric // Found pattern, but did not generate alternative sequence.
537eb11fae6SDimitry Andric // This can happen e.g. when an immediate could not be materialized
538eb11fae6SDimitry Andric // in a single instruction.
539eb11fae6SDimitry Andric if (InsInstrs.empty() || !TSchedModel.hasInstrSchedModelOrItineraries())
540eb11fae6SDimitry Andric continue;
541eb11fae6SDimitry Andric
542eb11fae6SDimitry Andric unsigned NewRootLatency, RootLatency;
543eb11fae6SDimitry Andric std::tie(NewRootLatency, RootLatency) = getLatenciesForInstrSequences(
5447fa27ce4SDimitry Andric Root, InsInstrs, DelInstrs, TraceEnsemble->getTrace(MBB));
545eb11fae6SDimitry Andric long CurrentLatencyDiff = ((long)RootLatency) - ((long)NewRootLatency);
546eb11fae6SDimitry Andric assert(CurrentLatencyDiff <= PrevLatencyDiff &&
547eb11fae6SDimitry Andric "Current pattern is better than previous pattern.");
548eb11fae6SDimitry Andric PrevLatencyDiff = CurrentLatencyDiff;
549eb11fae6SDimitry Andric }
550eb11fae6SDimitry Andric }
551eb11fae6SDimitry Andric
/// Substitute a slow code sequence with a faster one by
/// evaluating instruction combining pattern.
/// The prototype of such a pattern is MUl + ADD -> MADD. Performs instruction
/// combining based on machine trace metrics. Only combine a sequence of
/// instructions when this neither lengthens the critical path nor increases
/// resource pressure. When optimizing for codesize always combine when the new
/// sequence is shorter.
///
/// \param MBB the basic block to run pattern substitution over
/// \returns true if any instruction sequence in \p MBB was replaced
bool MachineCombiner::combineInstructions(MachineBasicBlock *MBB) {
  bool Changed = false;
  LLVM_DEBUG(dbgs() << "Combining MBB " << MBB->getName() << "\n");

  // Once a block grows past inc_threshold, switch from full trace
  // recomputation to incremental depth updates (see below).
  bool IncrementalUpdate = false;
  auto BlockIter = MBB->begin();
  // Position of the last incremental depth update; only meaningful while
  // IncrementalUpdate is true.
  decltype(BlockIter) LastUpdate;
  // Check if the block is in a loop.
  const MachineLoop *ML = MLI->getLoopFor(MBB);
  // Lazily create the trace ensemble with the target's preferred strategy;
  // it is reused across blocks for the lifetime of this pass invocation.
  if (!TraceEnsemble)
    TraceEnsemble = Traces->getEnsemble(TII->getMachineCombinerTraceStrategy());

  // Live register units, maintained so that incremental depth updates can
  // recompute instruction depths without a full trace invalidation.
  SparseSet<LiveRegUnit> RegUnits;
  RegUnits.setUniverse(TRI->getNumRegUnits());

  bool OptForSize = OptSize || llvm::shouldOptimizeForSize(MBB, PSI, MBFI);

  bool DoRegPressureReduce =
      TII->shouldReduceRegisterPressure(MBB, &RegClassInfo);

  // Note: BlockIter is advanced before MI is processed, so after a
  // substitution it already points at the instruction following the root.
  while (BlockIter != MBB->end()) {
    auto &MI = *BlockIter++;
    SmallVector<unsigned, 16> Patterns;
    // The motivating example is:
    //
    //     MUL  Other        MUL_op1 MUL_op2  Other
    //      \    /               \      |    /
    //      ADD/SUB      =>        MADD/MSUB
    //       (=Root)                 (=NewRoot)

    // The DAGCombine code always replaced MUL + ADD/SUB by MADD. While this is
    // usually beneficial for code size it unfortunately can hurt performance
    // when the ADD is on the critical path, but the MUL is not. With the
    // substitution the MUL becomes part of the critical path (in form of the
    // MADD) and can lengthen it on architectures where the MADD latency is
    // longer than the ADD latency.
    //
    // For each instruction we check if it can be the root of a combiner
    // pattern. Then for each pattern the new code sequence in form of MI is
    // generated and evaluated. When the efficiency criteria (don't lengthen
    // critical path, don't use more resources) is met the new sequence gets
    // hooked up into the basic block before the old sequence is removed.
    //
    // The algorithm does not try to evaluate all patterns and pick the best.
    // This is only an artificial restriction though. In practice there is
    // mostly one pattern, and getMachineCombinerPatterns() can order patterns
    // based on an internal cost heuristic. If
    // machine-combiner-verify-pattern-order is enabled, all patterns are
    // checked to ensure later patterns do not provide better latency savings.

    if (!TII->getMachineCombinerPatterns(MI, Patterns, DoRegPressureReduce))
      continue;

    if (VerifyPatternOrder)
      verifyPatternOrder(MBB, MI, Patterns);

    for (const auto P : Patterns) {
      SmallVector<MachineInstr *, 16> InsInstrs;
      SmallVector<MachineInstr *, 16> DelInstrs;
      DenseMap<unsigned, unsigned> InstrIdxForVirtReg;
      TII->genAlternativeCodeSequence(MI, P, InsInstrs, DelInstrs,
                                      InstrIdxForVirtReg);
      // Found pattern, but did not generate alternative sequence.
      // This can happen e.g. when an immediate could not be materialized
      // in a single instruction.
      if (InsInstrs.empty())
        continue;

      LLVM_DEBUG(if (dump_intrs) {
        dbgs() << "\tFor the Pattern (" << (int)P
               << ") these instructions could be removed\n";
        for (auto const *InstrPtr : DelInstrs)
          InstrPtr->print(dbgs(), /*IsStandalone*/false, /*SkipOpers*/false,
                          /*SkipDebugLoc*/false, /*AddNewLine*/true, TII);
        dbgs() << "\tThese instructions could replace the removed ones\n";
        for (auto const *InstrPtr : InsInstrs)
          InstrPtr->print(dbgs(), /*IsStandalone*/false, /*SkipOpers*/false,
                          /*SkipDebugLoc*/false, /*AddNewLine*/true, TII);
      });

      // Bring the incrementally-maintained depths up to date before any of
      // the cost checks below consult them.
      if (IncrementalUpdate && LastUpdate != BlockIter) {
        // Update depths since the last incremental update.
        TraceEnsemble->updateDepths(LastUpdate, BlockIter, RegUnits);
        LastUpdate = BlockIter;
      }

      // Objective 1: register-pressure reduction. Applied before the
      // latency/resource checks because it uses its own profitability test.
      if (DoRegPressureReduce &&
          getCombinerObjective(P) ==
              CombinerObjective::MustReduceRegisterPressure) {
        if (MBB->size() > inc_threshold) {
          // Use incremental depth updates for basic blocks above threshold
          IncrementalUpdate = true;
          LastUpdate = BlockIter;
        }
        if (reduceRegisterPressure(MI, MBB, InsInstrs, DelInstrs, P)) {
          // Replace DelInstrs with InsInstrs.
          insertDeleteInstructions(MBB, MI, InsInstrs, DelInstrs, TraceEnsemble,
                                   RegUnits, TII, P, IncrementalUpdate);
          Changed |= true;

          // Go back to previous instruction as it may have ILP reassociation
          // opportunity.
          BlockIter--;
          break;
        }
      }

      // Objective 2: throughput patterns inside loops are assumed profitable
      // without consulting the critical-path metrics.
      if (ML && TII->isThroughputPattern(P)) {
        LLVM_DEBUG(dbgs() << "\t Replacing due to throughput pattern in loop\n");
        insertDeleteInstructions(MBB, MI, InsInstrs, DelInstrs, TraceEnsemble,
                                 RegUnits, TII, P, IncrementalUpdate);
        // Eagerly stop after the first pattern fires.
        Changed = true;
        break;
      } else if (OptForSize && InsInstrs.size() < DelInstrs.size()) {
        // Objective 3: when optimizing for size, any strictly shorter
        // replacement sequence wins.
        LLVM_DEBUG(dbgs() << "\t Replacing due to OptForSize ("
                          << InsInstrs.size() << " < "
                          << DelInstrs.size() << ")\n");
        insertDeleteInstructions(MBB, MI, InsInstrs, DelInstrs, TraceEnsemble,
                                 RegUnits, TII, P, IncrementalUpdate);
        // Eagerly stop after the first pattern fires.
        Changed = true;
        break;
      } else {
        // Objective 4 (default): accept only if the critical path does not
        // lengthen and resource length is preserved.
        // For big basic blocks, we only compute the full trace the first time
        // we hit this. We do not invalidate the trace, but instead update the
        // instruction depths incrementally.
        // NOTE: Only the instruction depths up to MI are accurate. All other
        // trace information is not updated.
        MachineTraceMetrics::Trace BlockTrace = TraceEnsemble->getTrace(MBB);
        Traces->verifyAnalysis();
        if (improvesCriticalPathLen(MBB, &MI, BlockTrace, InsInstrs, DelInstrs,
                                    InstrIdxForVirtReg, P,
                                    !IncrementalUpdate) &&
            preservesResourceLen(MBB, BlockTrace, InsInstrs, DelInstrs)) {
          if (MBB->size() > inc_threshold) {
            // Use incremental depth updates for basic blocks above threshold.
            IncrementalUpdate = true;
            LastUpdate = BlockIter;
          }

          insertDeleteInstructions(MBB, MI, InsInstrs, DelInstrs, TraceEnsemble,
                                   RegUnits, TII, P, IncrementalUpdate);

          // Eagerly stop after the first pattern fires.
          Changed = true;
          break;
        }
        // Cleanup instructions of the alternative code sequence. There is no
        // use for them.
        MachineFunction *MF = MBB->getParent();
        for (auto *InstrPtr : InsInstrs)
          MF->deleteMachineInstr(InstrPtr);
      }
      InstrIdxForVirtReg.clear();
    }
  }

  // Incremental updates leave the trace only partially correct; drop it so
  // later consumers recompute from scratch.
  if (Changed && IncrementalUpdate)
    Traces->invalidate(MBB);
  return Changed;
}
72167c32a98SDimitry Andric
runOnMachineFunction(MachineFunction & MF)72267c32a98SDimitry Andric bool MachineCombiner::runOnMachineFunction(MachineFunction &MF) {
723eb11fae6SDimitry Andric STI = &MF.getSubtarget();
724eb11fae6SDimitry Andric TII = STI->getInstrInfo();
725eb11fae6SDimitry Andric TRI = STI->getRegisterInfo();
726eb11fae6SDimitry Andric SchedModel = STI->getSchedModel();
727eb11fae6SDimitry Andric TSchedModel.init(STI);
72867c32a98SDimitry Andric MRI = &MF.getRegInfo();
729ac9a064cSDimitry Andric MLI = &getAnalysis<MachineLoopInfoWrapperPass>().getLI();
73067c32a98SDimitry Andric Traces = &getAnalysis<MachineTraceMetrics>();
731706b4fc4SDimitry Andric PSI = &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
732706b4fc4SDimitry Andric MBFI = (PSI && PSI->hasProfileSummary()) ?
733706b4fc4SDimitry Andric &getAnalysis<LazyMachineBlockFrequencyInfoPass>().getBFI() :
734706b4fc4SDimitry Andric nullptr;
7357fa27ce4SDimitry Andric TraceEnsemble = nullptr;
736e6d15924SDimitry Andric OptSize = MF.getFunction().hasOptSize();
737b60736ecSDimitry Andric RegClassInfo.runOnMachineFunction(MF);
73867c32a98SDimitry Andric
739eb11fae6SDimitry Andric LLVM_DEBUG(dbgs() << getPassName() << ": " << MF.getName() << '\n');
74067c32a98SDimitry Andric if (!TII->useMachineCombiner()) {
741eb11fae6SDimitry Andric LLVM_DEBUG(
742eb11fae6SDimitry Andric dbgs()
743eb11fae6SDimitry Andric << " Skipping pass: Target does not support machine combiner\n");
74467c32a98SDimitry Andric return false;
74567c32a98SDimitry Andric }
74667c32a98SDimitry Andric
74767c32a98SDimitry Andric bool Changed = false;
74867c32a98SDimitry Andric
74967c32a98SDimitry Andric // Try to combine instructions.
75067c32a98SDimitry Andric for (auto &MBB : MF)
75167c32a98SDimitry Andric Changed |= combineInstructions(&MBB);
75267c32a98SDimitry Andric
75367c32a98SDimitry Andric return Changed;
75467c32a98SDimitry Andric }
755