14a16efa3SDimitry Andric //===-- TargetInstrInfo.cpp - Target Instruction Information --------------===//
2009b1c42SEd Schouten //
3e6d15924SDimitry Andric // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4e6d15924SDimitry Andric // See https://llvm.org/LICENSE.txt for license information.
5e6d15924SDimitry Andric // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6009b1c42SEd Schouten //
7009b1c42SEd Schouten //===----------------------------------------------------------------------===//
8009b1c42SEd Schouten //
94a16efa3SDimitry Andric // This file implements the TargetInstrInfo class.
10009b1c42SEd Schouten //
11009b1c42SEd Schouten //===----------------------------------------------------------------------===//
12009b1c42SEd Schouten
13044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetInstrInfo.h"
14cfca06d7SDimitry Andric #include "llvm/ADT/StringExtras.h"
15145449b1SDimitry Andric #include "llvm/BinaryFormat/Dwarf.h"
16e3b55780SDimitry Andric #include "llvm/CodeGen/MachineCombinerPattern.h"
174a16efa3SDimitry Andric #include "llvm/CodeGen/MachineFrameInfo.h"
185ca98fd9SDimitry Andric #include "llvm/CodeGen/MachineInstrBuilder.h"
194a16efa3SDimitry Andric #include "llvm/CodeGen/MachineMemOperand.h"
204a16efa3SDimitry Andric #include "llvm/CodeGen/MachineRegisterInfo.h"
21706b4fc4SDimitry Andric #include "llvm/CodeGen/MachineScheduler.h"
227fa27ce4SDimitry Andric #include "llvm/CodeGen/MachineTraceMetrics.h"
234a16efa3SDimitry Andric #include "llvm/CodeGen/PseudoSourceValue.h"
244a16efa3SDimitry Andric #include "llvm/CodeGen/ScoreboardHazardRecognizer.h"
255ca98fd9SDimitry Andric #include "llvm/CodeGen/StackMaps.h"
26044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetFrameLowering.h"
27044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetLowering.h"
28044eb2f6SDimitry Andric #include "llvm/CodeGen/TargetRegisterInfo.h"
293a0822f0SDimitry Andric #include "llvm/CodeGen/TargetSchedule.h"
30f8af5cf6SDimitry Andric #include "llvm/IR/DataLayout.h"
311d5ae102SDimitry Andric #include "llvm/IR/DebugInfoMetadata.h"
324a16efa3SDimitry Andric #include "llvm/MC/MCAsmInfo.h"
334a16efa3SDimitry Andric #include "llvm/MC/MCInstrItineraries.h"
344a16efa3SDimitry Andric #include "llvm/Support/CommandLine.h"
354a16efa3SDimitry Andric #include "llvm/Support/ErrorHandling.h"
364a16efa3SDimitry Andric #include "llvm/Support/raw_ostream.h"
37b1c73532SDimitry Andric #include "llvm/Target/TargetMachine.h"
3801095a5dSDimitry Andric
39009b1c42SEd Schouten using namespace llvm;
40009b1c42SEd Schouten
41cf099d11SDimitry Andric static cl::opt<bool> DisableHazardRecognizer(
42cf099d11SDimitry Andric "disable-sched-hazard", cl::Hidden, cl::init(false),
43cf099d11SDimitry Andric cl::desc("Disable hazard detection during preRA scheduling"));
44cf099d11SDimitry Andric
45145449b1SDimitry Andric TargetInstrInfo::~TargetInstrInfo() = default;
464a16efa3SDimitry Andric
474a16efa3SDimitry Andric const TargetRegisterClass*
getRegClass(const MCInstrDesc & MCID,unsigned OpNum,const TargetRegisterInfo * TRI,const MachineFunction & MF) const484a16efa3SDimitry Andric TargetInstrInfo::getRegClass(const MCInstrDesc &MCID, unsigned OpNum,
494a16efa3SDimitry Andric const TargetRegisterInfo *TRI,
504a16efa3SDimitry Andric const MachineFunction &MF) const {
514a16efa3SDimitry Andric if (OpNum >= MCID.getNumOperands())
525ca98fd9SDimitry Andric return nullptr;
534a16efa3SDimitry Andric
54e3b55780SDimitry Andric short RegClass = MCID.operands()[OpNum].RegClass;
55e3b55780SDimitry Andric if (MCID.operands()[OpNum].isLookupPtrRegClass())
564a16efa3SDimitry Andric return TRI->getPointerRegClass(MF, RegClass);
574a16efa3SDimitry Andric
584a16efa3SDimitry Andric // Instructions like INSERT_SUBREG do not have fixed register classes.
594a16efa3SDimitry Andric if (RegClass < 0)
605ca98fd9SDimitry Andric return nullptr;
614a16efa3SDimitry Andric
624a16efa3SDimitry Andric // Otherwise just look it up normally.
634a16efa3SDimitry Andric return TRI->getRegClass(RegClass);
644a16efa3SDimitry Andric }
654a16efa3SDimitry Andric
664a16efa3SDimitry Andric /// insertNoop - Insert a noop into the instruction stream at the specified
674a16efa3SDimitry Andric /// point.
insertNoop(MachineBasicBlock & MBB,MachineBasicBlock::iterator MI) const684a16efa3SDimitry Andric void TargetInstrInfo::insertNoop(MachineBasicBlock &MBB,
694a16efa3SDimitry Andric MachineBasicBlock::iterator MI) const {
704a16efa3SDimitry Andric llvm_unreachable("Target didn't implement insertNoop!");
714a16efa3SDimitry Andric }
724a16efa3SDimitry Andric
73b60736ecSDimitry Andric /// insertNoops - Insert noops into the instruction stream at the specified
74b60736ecSDimitry Andric /// point.
insertNoops(MachineBasicBlock & MBB,MachineBasicBlock::iterator MI,unsigned Quantity) const75b60736ecSDimitry Andric void TargetInstrInfo::insertNoops(MachineBasicBlock &MBB,
76b60736ecSDimitry Andric MachineBasicBlock::iterator MI,
77b60736ecSDimitry Andric unsigned Quantity) const {
78b60736ecSDimitry Andric for (unsigned i = 0; i < Quantity; ++i)
79b60736ecSDimitry Andric insertNoop(MBB, MI);
80b60736ecSDimitry Andric }
81b60736ecSDimitry Andric
isAsmComment(const char * Str,const MCAsmInfo & MAI)82044eb2f6SDimitry Andric static bool isAsmComment(const char *Str, const MCAsmInfo &MAI) {
83044eb2f6SDimitry Andric return strncmp(Str, MAI.getCommentString().data(),
84044eb2f6SDimitry Andric MAI.getCommentString().size()) == 0;
85044eb2f6SDimitry Andric }
86044eb2f6SDimitry Andric
874a16efa3SDimitry Andric /// Measure the specified inline asm to determine an approximation of its
884a16efa3SDimitry Andric /// length.
894a16efa3SDimitry Andric /// Comments (which run till the next SeparatorString or newline) do not
904a16efa3SDimitry Andric /// count as an instruction.
914a16efa3SDimitry Andric /// Any other non-whitespace text is considered an instruction, with
924a16efa3SDimitry Andric /// multiple instructions separated by SeparatorString or newlines.
934a16efa3SDimitry Andric /// Variable-length instructions are not handled here; this function
944a16efa3SDimitry Andric /// may be overloaded in the target code to do that.
95044eb2f6SDimitry Andric /// We implement a special case of the .space directive which takes only a
96044eb2f6SDimitry Andric /// single integer argument in base 10 that is the size in bytes. This is a
97044eb2f6SDimitry Andric /// restricted form of the GAS directive in that we only interpret
98044eb2f6SDimitry Andric /// simple--i.e. not a logical or arithmetic expression--size values without
99044eb2f6SDimitry Andric /// the optional fill value. This is primarily used for creating arbitrary
100044eb2f6SDimitry Andric /// sized inline asm blocks for testing purposes.
getInlineAsmLength(const char * Str,const MCAsmInfo & MAI,const TargetSubtargetInfo * STI) const101e6d15924SDimitry Andric unsigned TargetInstrInfo::getInlineAsmLength(
102e6d15924SDimitry Andric const char *Str,
103e6d15924SDimitry Andric const MCAsmInfo &MAI, const TargetSubtargetInfo *STI) const {
1044a16efa3SDimitry Andric // Count the number of instructions in the asm.
105044eb2f6SDimitry Andric bool AtInsnStart = true;
106044eb2f6SDimitry Andric unsigned Length = 0;
107e6d15924SDimitry Andric const unsigned MaxInstLength = MAI.getMaxInstLength(STI);
1084a16efa3SDimitry Andric for (; *Str; ++Str) {
1094a16efa3SDimitry Andric if (*Str == '\n' || strncmp(Str, MAI.getSeparatorString(),
11001095a5dSDimitry Andric strlen(MAI.getSeparatorString())) == 0) {
111044eb2f6SDimitry Andric AtInsnStart = true;
112044eb2f6SDimitry Andric } else if (isAsmComment(Str, MAI)) {
11301095a5dSDimitry Andric // Stop counting as an instruction after a comment until the next
11401095a5dSDimitry Andric // separator.
115044eb2f6SDimitry Andric AtInsnStart = false;
1164a16efa3SDimitry Andric }
1174a16efa3SDimitry Andric
118cfca06d7SDimitry Andric if (AtInsnStart && !isSpace(static_cast<unsigned char>(*Str))) {
119e6d15924SDimitry Andric unsigned AddLength = MaxInstLength;
120044eb2f6SDimitry Andric if (strncmp(Str, ".space", 6) == 0) {
121044eb2f6SDimitry Andric char *EStr;
122044eb2f6SDimitry Andric int SpaceSize;
123044eb2f6SDimitry Andric SpaceSize = strtol(Str + 6, &EStr, 10);
124044eb2f6SDimitry Andric SpaceSize = SpaceSize < 0 ? 0 : SpaceSize;
125cfca06d7SDimitry Andric while (*EStr != '\n' && isSpace(static_cast<unsigned char>(*EStr)))
126044eb2f6SDimitry Andric ++EStr;
127044eb2f6SDimitry Andric if (*EStr == '\0' || *EStr == '\n' ||
128044eb2f6SDimitry Andric isAsmComment(EStr, MAI)) // Successfully parsed .space argument
129044eb2f6SDimitry Andric AddLength = SpaceSize;
130044eb2f6SDimitry Andric }
131044eb2f6SDimitry Andric Length += AddLength;
132044eb2f6SDimitry Andric AtInsnStart = false;
13301095a5dSDimitry Andric }
13401095a5dSDimitry Andric }
13501095a5dSDimitry Andric
136044eb2f6SDimitry Andric return Length;
1374a16efa3SDimitry Andric }
1384a16efa3SDimitry Andric
13966e41e3cSRoman Divacky /// ReplaceTailWithBranchTo - Delete the instruction OldInst and everything
14066e41e3cSRoman Divacky /// after it, replacing it with an unconditional branch to NewDest.
14166e41e3cSRoman Divacky void
ReplaceTailWithBranchTo(MachineBasicBlock::iterator Tail,MachineBasicBlock * NewDest) const1424a16efa3SDimitry Andric TargetInstrInfo::ReplaceTailWithBranchTo(MachineBasicBlock::iterator Tail,
14366e41e3cSRoman Divacky MachineBasicBlock *NewDest) const {
14466e41e3cSRoman Divacky MachineBasicBlock *MBB = Tail->getParent();
14566e41e3cSRoman Divacky
14666e41e3cSRoman Divacky // Remove all the old successors of MBB from the CFG.
14766e41e3cSRoman Divacky while (!MBB->succ_empty())
14866e41e3cSRoman Divacky MBB->removeSuccessor(MBB->succ_begin());
14966e41e3cSRoman Divacky
15001095a5dSDimitry Andric // Save off the debug loc before erasing the instruction.
15101095a5dSDimitry Andric DebugLoc DL = Tail->getDebugLoc();
15201095a5dSDimitry Andric
153e6d15924SDimitry Andric // Update call site info and remove all the dead instructions
154e6d15924SDimitry Andric // from the end of MBB.
155e6d15924SDimitry Andric while (Tail != MBB->end()) {
156e6d15924SDimitry Andric auto MI = Tail++;
157cfca06d7SDimitry Andric if (MI->shouldUpdateCallSiteInfo())
1581d5ae102SDimitry Andric MBB->getParent()->eraseCallSiteInfo(&*MI);
159e6d15924SDimitry Andric MBB->erase(MI);
160e6d15924SDimitry Andric }
16166e41e3cSRoman Divacky
16266e41e3cSRoman Divacky // If MBB isn't immediately before MBB, insert a branch to it.
16366e41e3cSRoman Divacky if (++MachineFunction::iterator(MBB) != MachineFunction::iterator(NewDest))
164b915e9e0SDimitry Andric insertBranch(*MBB, NewDest, nullptr, SmallVector<MachineOperand, 0>(), DL);
16566e41e3cSRoman Divacky MBB->addSuccessor(NewDest);
16666e41e3cSRoman Divacky }
16766e41e3cSRoman Divacky
commuteInstructionImpl(MachineInstr & MI,bool NewMI,unsigned Idx1,unsigned Idx2) const16801095a5dSDimitry Andric MachineInstr *TargetInstrInfo::commuteInstructionImpl(MachineInstr &MI,
16901095a5dSDimitry Andric bool NewMI, unsigned Idx1,
170dd58ef01SDimitry Andric unsigned Idx2) const {
17101095a5dSDimitry Andric const MCInstrDesc &MCID = MI.getDesc();
172411bd29eSDimitry Andric bool HasDef = MCID.getNumDefs();
17301095a5dSDimitry Andric if (HasDef && !MI.getOperand(0).isReg())
17459850d08SRoman Divacky // No idea how to commute this instruction. Target should implement its own.
1755ca98fd9SDimitry Andric return nullptr;
17618f153bdSEd Schouten
177dd58ef01SDimitry Andric unsigned CommutableOpIdx1 = Idx1; (void)CommutableOpIdx1;
178dd58ef01SDimitry Andric unsigned CommutableOpIdx2 = Idx2; (void)CommutableOpIdx2;
179dd58ef01SDimitry Andric assert(findCommutedOpIndices(MI, CommutableOpIdx1, CommutableOpIdx2) &&
180dd58ef01SDimitry Andric CommutableOpIdx1 == Idx1 && CommutableOpIdx2 == Idx2 &&
181dd58ef01SDimitry Andric "TargetInstrInfo::CommuteInstructionImpl(): not commutable operands.");
18201095a5dSDimitry Andric assert(MI.getOperand(Idx1).isReg() && MI.getOperand(Idx2).isReg() &&
183009b1c42SEd Schouten "This only knows how to commute register operands so far");
184dd58ef01SDimitry Andric
185e6d15924SDimitry Andric Register Reg0 = HasDef ? MI.getOperand(0).getReg() : Register();
186e6d15924SDimitry Andric Register Reg1 = MI.getOperand(Idx1).getReg();
187e6d15924SDimitry Andric Register Reg2 = MI.getOperand(Idx2).getReg();
18801095a5dSDimitry Andric unsigned SubReg0 = HasDef ? MI.getOperand(0).getSubReg() : 0;
18901095a5dSDimitry Andric unsigned SubReg1 = MI.getOperand(Idx1).getSubReg();
19001095a5dSDimitry Andric unsigned SubReg2 = MI.getOperand(Idx2).getSubReg();
19101095a5dSDimitry Andric bool Reg1IsKill = MI.getOperand(Idx1).isKill();
19201095a5dSDimitry Andric bool Reg2IsKill = MI.getOperand(Idx2).isKill();
19301095a5dSDimitry Andric bool Reg1IsUndef = MI.getOperand(Idx1).isUndef();
19401095a5dSDimitry Andric bool Reg2IsUndef = MI.getOperand(Idx2).isUndef();
19501095a5dSDimitry Andric bool Reg1IsInternal = MI.getOperand(Idx1).isInternalRead();
19601095a5dSDimitry Andric bool Reg2IsInternal = MI.getOperand(Idx2).isInternalRead();
197eb11fae6SDimitry Andric // Avoid calling isRenamable for virtual registers since we assert that
198eb11fae6SDimitry Andric // renamable property is only queried/set for physical registers.
199e3b55780SDimitry Andric bool Reg1IsRenamable =
200e3b55780SDimitry Andric Reg1.isPhysical() ? MI.getOperand(Idx1).isRenamable() : false;
201e3b55780SDimitry Andric bool Reg2IsRenamable =
202e3b55780SDimitry Andric Reg2.isPhysical() ? MI.getOperand(Idx2).isRenamable() : false;
20330815c53SDimitry Andric // If destination is tied to either of the commuted source register, then
20430815c53SDimitry Andric // it must be updated.
20530815c53SDimitry Andric if (HasDef && Reg0 == Reg1 &&
20601095a5dSDimitry Andric MI.getDesc().getOperandConstraint(Idx1, MCOI::TIED_TO) == 0) {
207009b1c42SEd Schouten Reg2IsKill = false;
20830815c53SDimitry Andric Reg0 = Reg2;
20963faed5bSDimitry Andric SubReg0 = SubReg2;
21030815c53SDimitry Andric } else if (HasDef && Reg0 == Reg2 &&
21101095a5dSDimitry Andric MI.getDesc().getOperandConstraint(Idx2, MCOI::TIED_TO) == 0) {
21230815c53SDimitry Andric Reg1IsKill = false;
21330815c53SDimitry Andric Reg0 = Reg1;
21463faed5bSDimitry Andric SubReg0 = SubReg1;
215009b1c42SEd Schouten }
216009b1c42SEd Schouten
21701095a5dSDimitry Andric MachineInstr *CommutedMI = nullptr;
218009b1c42SEd Schouten if (NewMI) {
219009b1c42SEd Schouten // Create a new instruction.
220044eb2f6SDimitry Andric MachineFunction &MF = *MI.getMF();
22101095a5dSDimitry Andric CommutedMI = MF.CloneMachineInstr(&MI);
22201095a5dSDimitry Andric } else {
22301095a5dSDimitry Andric CommutedMI = &MI;
224009b1c42SEd Schouten }
225009b1c42SEd Schouten
22663faed5bSDimitry Andric if (HasDef) {
22701095a5dSDimitry Andric CommutedMI->getOperand(0).setReg(Reg0);
22801095a5dSDimitry Andric CommutedMI->getOperand(0).setSubReg(SubReg0);
22963faed5bSDimitry Andric }
23001095a5dSDimitry Andric CommutedMI->getOperand(Idx2).setReg(Reg1);
23101095a5dSDimitry Andric CommutedMI->getOperand(Idx1).setReg(Reg2);
23201095a5dSDimitry Andric CommutedMI->getOperand(Idx2).setSubReg(SubReg1);
23301095a5dSDimitry Andric CommutedMI->getOperand(Idx1).setSubReg(SubReg2);
23401095a5dSDimitry Andric CommutedMI->getOperand(Idx2).setIsKill(Reg1IsKill);
23501095a5dSDimitry Andric CommutedMI->getOperand(Idx1).setIsKill(Reg2IsKill);
23601095a5dSDimitry Andric CommutedMI->getOperand(Idx2).setIsUndef(Reg1IsUndef);
23701095a5dSDimitry Andric CommutedMI->getOperand(Idx1).setIsUndef(Reg2IsUndef);
23801095a5dSDimitry Andric CommutedMI->getOperand(Idx2).setIsInternalRead(Reg1IsInternal);
23901095a5dSDimitry Andric CommutedMI->getOperand(Idx1).setIsInternalRead(Reg2IsInternal);
240eb11fae6SDimitry Andric // Avoid calling setIsRenamable for virtual registers since we assert that
241eb11fae6SDimitry Andric // renamable property is only queried/set for physical registers.
242e3b55780SDimitry Andric if (Reg1.isPhysical())
243eb11fae6SDimitry Andric CommutedMI->getOperand(Idx2).setIsRenamable(Reg1IsRenamable);
244e3b55780SDimitry Andric if (Reg2.isPhysical())
245eb11fae6SDimitry Andric CommutedMI->getOperand(Idx1).setIsRenamable(Reg2IsRenamable);
24601095a5dSDimitry Andric return CommutedMI;
247009b1c42SEd Schouten }
248009b1c42SEd Schouten
commuteInstruction(MachineInstr & MI,bool NewMI,unsigned OpIdx1,unsigned OpIdx2) const24901095a5dSDimitry Andric MachineInstr *TargetInstrInfo::commuteInstruction(MachineInstr &MI, bool NewMI,
250dd58ef01SDimitry Andric unsigned OpIdx1,
251dd58ef01SDimitry Andric unsigned OpIdx2) const {
252dd58ef01SDimitry Andric // If OpIdx1 or OpIdx2 is not specified, then this method is free to choose
253dd58ef01SDimitry Andric // any commutable operand, which is done in findCommutedOpIndices() method
254dd58ef01SDimitry Andric // called below.
255dd58ef01SDimitry Andric if ((OpIdx1 == CommuteAnyOperandIndex || OpIdx2 == CommuteAnyOperandIndex) &&
256dd58ef01SDimitry Andric !findCommutedOpIndices(MI, OpIdx1, OpIdx2)) {
25701095a5dSDimitry Andric assert(MI.isCommutable() &&
258dd58ef01SDimitry Andric "Precondition violation: MI must be commutable.");
259dd58ef01SDimitry Andric return nullptr;
260dd58ef01SDimitry Andric }
261dd58ef01SDimitry Andric return commuteInstructionImpl(MI, NewMI, OpIdx1, OpIdx2);
262dd58ef01SDimitry Andric }
263dd58ef01SDimitry Andric
fixCommutedOpIndices(unsigned & ResultIdx1,unsigned & ResultIdx2,unsigned CommutableOpIdx1,unsigned CommutableOpIdx2)264dd58ef01SDimitry Andric bool TargetInstrInfo::fixCommutedOpIndices(unsigned &ResultIdx1,
265dd58ef01SDimitry Andric unsigned &ResultIdx2,
266dd58ef01SDimitry Andric unsigned CommutableOpIdx1,
267dd58ef01SDimitry Andric unsigned CommutableOpIdx2) {
268dd58ef01SDimitry Andric if (ResultIdx1 == CommuteAnyOperandIndex &&
269dd58ef01SDimitry Andric ResultIdx2 == CommuteAnyOperandIndex) {
270dd58ef01SDimitry Andric ResultIdx1 = CommutableOpIdx1;
271dd58ef01SDimitry Andric ResultIdx2 = CommutableOpIdx2;
272dd58ef01SDimitry Andric } else if (ResultIdx1 == CommuteAnyOperandIndex) {
273dd58ef01SDimitry Andric if (ResultIdx2 == CommutableOpIdx1)
274dd58ef01SDimitry Andric ResultIdx1 = CommutableOpIdx2;
275dd58ef01SDimitry Andric else if (ResultIdx2 == CommutableOpIdx2)
276dd58ef01SDimitry Andric ResultIdx1 = CommutableOpIdx1;
277dd58ef01SDimitry Andric else
278dd58ef01SDimitry Andric return false;
279dd58ef01SDimitry Andric } else if (ResultIdx2 == CommuteAnyOperandIndex) {
280dd58ef01SDimitry Andric if (ResultIdx1 == CommutableOpIdx1)
281dd58ef01SDimitry Andric ResultIdx2 = CommutableOpIdx2;
282dd58ef01SDimitry Andric else if (ResultIdx1 == CommutableOpIdx2)
283dd58ef01SDimitry Andric ResultIdx2 = CommutableOpIdx1;
284dd58ef01SDimitry Andric else
285dd58ef01SDimitry Andric return false;
286dd58ef01SDimitry Andric } else
287dd58ef01SDimitry Andric // Check that the result operand indices match the given commutable
288dd58ef01SDimitry Andric // operand indices.
289dd58ef01SDimitry Andric return (ResultIdx1 == CommutableOpIdx1 && ResultIdx2 == CommutableOpIdx2) ||
290dd58ef01SDimitry Andric (ResultIdx1 == CommutableOpIdx2 && ResultIdx2 == CommutableOpIdx1);
291dd58ef01SDimitry Andric
292dd58ef01SDimitry Andric return true;
293dd58ef01SDimitry Andric }
294dd58ef01SDimitry Andric
findCommutedOpIndices(const MachineInstr & MI,unsigned & SrcOpIdx1,unsigned & SrcOpIdx2) const2951d5ae102SDimitry Andric bool TargetInstrInfo::findCommutedOpIndices(const MachineInstr &MI,
29659850d08SRoman Divacky unsigned &SrcOpIdx1,
29759850d08SRoman Divacky unsigned &SrcOpIdx2) const {
29801095a5dSDimitry Andric assert(!MI.isBundle() &&
2994a16efa3SDimitry Andric "TargetInstrInfo::findCommutedOpIndices() can't handle bundles");
30063faed5bSDimitry Andric
30101095a5dSDimitry Andric const MCInstrDesc &MCID = MI.getDesc();
302411bd29eSDimitry Andric if (!MCID.isCommutable())
30318f153bdSEd Schouten return false;
304dd58ef01SDimitry Andric
30559850d08SRoman Divacky // This assumes v0 = op v1, v2 and commuting would swap v1 and v2. If this
30659850d08SRoman Divacky // is not true, then the target must implement this.
307dd58ef01SDimitry Andric unsigned CommutableOpIdx1 = MCID.getNumDefs();
308dd58ef01SDimitry Andric unsigned CommutableOpIdx2 = CommutableOpIdx1 + 1;
309dd58ef01SDimitry Andric if (!fixCommutedOpIndices(SrcOpIdx1, SrcOpIdx2,
310dd58ef01SDimitry Andric CommutableOpIdx1, CommutableOpIdx2))
311dd58ef01SDimitry Andric return false;
312dd58ef01SDimitry Andric
31301095a5dSDimitry Andric if (!MI.getOperand(SrcOpIdx1).isReg() || !MI.getOperand(SrcOpIdx2).isReg())
31459850d08SRoman Divacky // No idea.
31559850d08SRoman Divacky return false;
316009b1c42SEd Schouten return true;
317009b1c42SEd Schouten }
318009b1c42SEd Schouten
isUnpredicatedTerminator(const MachineInstr & MI) const31901095a5dSDimitry Andric bool TargetInstrInfo::isUnpredicatedTerminator(const MachineInstr &MI) const {
32001095a5dSDimitry Andric if (!MI.isTerminator()) return false;
32163faed5bSDimitry Andric
32263faed5bSDimitry Andric // Conditional branch is a special case.
32301095a5dSDimitry Andric if (MI.isBranch() && !MI.isBarrier())
32463faed5bSDimitry Andric return true;
32501095a5dSDimitry Andric if (!MI.isPredicable())
32663faed5bSDimitry Andric return true;
32763faed5bSDimitry Andric return !isPredicated(MI);
32863faed5bSDimitry Andric }
32963faed5bSDimitry Andric
PredicateInstruction(MachineInstr & MI,ArrayRef<MachineOperand> Pred) const3303a0822f0SDimitry Andric bool TargetInstrInfo::PredicateInstruction(
33101095a5dSDimitry Andric MachineInstr &MI, ArrayRef<MachineOperand> Pred) const {
332009b1c42SEd Schouten bool MadeChange = false;
33363faed5bSDimitry Andric
33401095a5dSDimitry Andric assert(!MI.isBundle() &&
3354a16efa3SDimitry Andric "TargetInstrInfo::PredicateInstruction() can't handle bundles");
33663faed5bSDimitry Andric
33701095a5dSDimitry Andric const MCInstrDesc &MCID = MI.getDesc();
33801095a5dSDimitry Andric if (!MI.isPredicable())
339009b1c42SEd Schouten return false;
340009b1c42SEd Schouten
34101095a5dSDimitry Andric for (unsigned j = 0, i = 0, e = MI.getNumOperands(); i != e; ++i) {
342e3b55780SDimitry Andric if (MCID.operands()[i].isPredicate()) {
34301095a5dSDimitry Andric MachineOperand &MO = MI.getOperand(i);
344009b1c42SEd Schouten if (MO.isReg()) {
345009b1c42SEd Schouten MO.setReg(Pred[j].getReg());
346009b1c42SEd Schouten MadeChange = true;
347009b1c42SEd Schouten } else if (MO.isImm()) {
348009b1c42SEd Schouten MO.setImm(Pred[j].getImm());
349009b1c42SEd Schouten MadeChange = true;
350009b1c42SEd Schouten } else if (MO.isMBB()) {
351009b1c42SEd Schouten MO.setMBB(Pred[j].getMBB());
352009b1c42SEd Schouten MadeChange = true;
353009b1c42SEd Schouten }
354009b1c42SEd Schouten ++j;
355009b1c42SEd Schouten }
356009b1c42SEd Schouten }
357009b1c42SEd Schouten return MadeChange;
358009b1c42SEd Schouten }
359009b1c42SEd Schouten
hasLoadFromStackSlot(const MachineInstr & MI,SmallVectorImpl<const MachineMemOperand * > & Accesses) const360d8e91e46SDimitry Andric bool TargetInstrInfo::hasLoadFromStackSlot(
361d8e91e46SDimitry Andric const MachineInstr &MI,
362d8e91e46SDimitry Andric SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
363d8e91e46SDimitry Andric size_t StartSize = Accesses.size();
36401095a5dSDimitry Andric for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
36501095a5dSDimitry Andric oe = MI.memoperands_end();
36601095a5dSDimitry Andric o != oe; ++o) {
367d8e91e46SDimitry Andric if ((*o)->isLoad() &&
368c0981da4SDimitry Andric isa_and_nonnull<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
369d8e91e46SDimitry Andric Accesses.push_back(*o);
37030815c53SDimitry Andric }
371d8e91e46SDimitry Andric return Accesses.size() != StartSize;
37230815c53SDimitry Andric }
37330815c53SDimitry Andric
hasStoreToStackSlot(const MachineInstr & MI,SmallVectorImpl<const MachineMemOperand * > & Accesses) const374d8e91e46SDimitry Andric bool TargetInstrInfo::hasStoreToStackSlot(
375d8e91e46SDimitry Andric const MachineInstr &MI,
376d8e91e46SDimitry Andric SmallVectorImpl<const MachineMemOperand *> &Accesses) const {
377d8e91e46SDimitry Andric size_t StartSize = Accesses.size();
37801095a5dSDimitry Andric for (MachineInstr::mmo_iterator o = MI.memoperands_begin(),
37901095a5dSDimitry Andric oe = MI.memoperands_end();
38001095a5dSDimitry Andric o != oe; ++o) {
381d8e91e46SDimitry Andric if ((*o)->isStore() &&
382c0981da4SDimitry Andric isa_and_nonnull<FixedStackPseudoSourceValue>((*o)->getPseudoValue()))
383d8e91e46SDimitry Andric Accesses.push_back(*o);
38430815c53SDimitry Andric }
385d8e91e46SDimitry Andric return Accesses.size() != StartSize;
38630815c53SDimitry Andric }
38730815c53SDimitry Andric
getStackSlotRange(const TargetRegisterClass * RC,unsigned SubIdx,unsigned & Size,unsigned & Offset,const MachineFunction & MF) const388f8af5cf6SDimitry Andric bool TargetInstrInfo::getStackSlotRange(const TargetRegisterClass *RC,
389f8af5cf6SDimitry Andric unsigned SubIdx, unsigned &Size,
390f8af5cf6SDimitry Andric unsigned &Offset,
3915a5ac124SDimitry Andric const MachineFunction &MF) const {
39212f3ca4cSDimitry Andric const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
393f8af5cf6SDimitry Andric if (!SubIdx) {
39412f3ca4cSDimitry Andric Size = TRI->getSpillSize(*RC);
395f8af5cf6SDimitry Andric Offset = 0;
396f8af5cf6SDimitry Andric return true;
397f8af5cf6SDimitry Andric }
3985a5ac124SDimitry Andric unsigned BitSize = TRI->getSubRegIdxSize(SubIdx);
399d8e91e46SDimitry Andric // Convert bit size to byte size.
400f8af5cf6SDimitry Andric if (BitSize % 8)
401f8af5cf6SDimitry Andric return false;
402f8af5cf6SDimitry Andric
4035a5ac124SDimitry Andric int BitOffset = TRI->getSubRegIdxOffset(SubIdx);
404f8af5cf6SDimitry Andric if (BitOffset < 0 || BitOffset % 8)
405f8af5cf6SDimitry Andric return false;
406f8af5cf6SDimitry Andric
4071d5ae102SDimitry Andric Size = BitSize / 8;
408f8af5cf6SDimitry Andric Offset = (unsigned)BitOffset / 8;
409f8af5cf6SDimitry Andric
41012f3ca4cSDimitry Andric assert(TRI->getSpillSize(*RC) >= (Offset + Size) && "bad subregister range");
411f8af5cf6SDimitry Andric
412dd58ef01SDimitry Andric if (!MF.getDataLayout().isLittleEndian()) {
41312f3ca4cSDimitry Andric Offset = TRI->getSpillSize(*RC) - (Offset + Size);
414f8af5cf6SDimitry Andric }
415f8af5cf6SDimitry Andric return true;
416f8af5cf6SDimitry Andric }
417f8af5cf6SDimitry Andric
reMaterialize(MachineBasicBlock & MBB,MachineBasicBlock::iterator I,Register DestReg,unsigned SubIdx,const MachineInstr & Orig,const TargetRegisterInfo & TRI) const4184a16efa3SDimitry Andric void TargetInstrInfo::reMaterialize(MachineBasicBlock &MBB,
419009b1c42SEd Schouten MachineBasicBlock::iterator I,
420cfca06d7SDimitry Andric Register DestReg, unsigned SubIdx,
42101095a5dSDimitry Andric const MachineInstr &Orig,
42266e41e3cSRoman Divacky const TargetRegisterInfo &TRI) const {
42301095a5dSDimitry Andric MachineInstr *MI = MBB.getParent()->CloneMachineInstr(&Orig);
42466e41e3cSRoman Divacky MI->substituteRegister(MI->getOperand(0).getReg(), DestReg, SubIdx, TRI);
425009b1c42SEd Schouten MBB.insert(I, MI);
426009b1c42SEd Schouten }
427009b1c42SEd Schouten
produceSameValue(const MachineInstr & MI0,const MachineInstr & MI1,const MachineRegisterInfo * MRI) const42801095a5dSDimitry Andric bool TargetInstrInfo::produceSameValue(const MachineInstr &MI0,
42901095a5dSDimitry Andric const MachineInstr &MI1,
430cf099d11SDimitry Andric const MachineRegisterInfo *MRI) const {
43101095a5dSDimitry Andric return MI0.isIdenticalTo(MI1, MachineInstr::IgnoreVRegDefs);
43267a71b31SRoman Divacky }
43367a71b31SRoman Divacky
434b1c73532SDimitry Andric MachineInstr &
duplicate(MachineBasicBlock & MBB,MachineBasicBlock::iterator InsertBefore,const MachineInstr & Orig) const435b1c73532SDimitry Andric TargetInstrInfo::duplicate(MachineBasicBlock &MBB,
436b1c73532SDimitry Andric MachineBasicBlock::iterator InsertBefore,
437b1c73532SDimitry Andric const MachineInstr &Orig) const {
438044eb2f6SDimitry Andric MachineFunction &MF = *MBB.getParent();
439b1c73532SDimitry Andric // CFI instructions are marked as non-duplicable, because Darwin compact
440b1c73532SDimitry Andric // unwind info emission can't handle multiple prologue setups.
441b1c73532SDimitry Andric assert((!Orig.isNotDuplicable() ||
442b1c73532SDimitry Andric (!MF.getTarget().getTargetTriple().isOSDarwin() &&
443b1c73532SDimitry Andric Orig.isCFIInstruction())) &&
444b1c73532SDimitry Andric "Instruction cannot be duplicated");
445b1c73532SDimitry Andric
44677fc4c14SDimitry Andric return MF.cloneMachineInstrBundle(MBB, InsertBefore, Orig);
447829000e0SRoman Divacky }
448829000e0SRoman Divacky
44966e41e3cSRoman Divacky // If the COPY instruction in MI can be folded to a stack operation, return
45066e41e3cSRoman Divacky // the register class to use.
canFoldCopy(const MachineInstr & MI,const TargetInstrInfo & TII,unsigned FoldIdx)45101095a5dSDimitry Andric static const TargetRegisterClass *canFoldCopy(const MachineInstr &MI,
4527fa27ce4SDimitry Andric const TargetInstrInfo &TII,
45366e41e3cSRoman Divacky unsigned FoldIdx) {
4547fa27ce4SDimitry Andric assert(TII.isCopyInstr(MI) && "MI must be a COPY instruction");
45501095a5dSDimitry Andric if (MI.getNumOperands() != 2)
4565ca98fd9SDimitry Andric return nullptr;
45766e41e3cSRoman Divacky assert(FoldIdx<2 && "FoldIdx refers no nonexistent operand");
45866e41e3cSRoman Divacky
45901095a5dSDimitry Andric const MachineOperand &FoldOp = MI.getOperand(FoldIdx);
46001095a5dSDimitry Andric const MachineOperand &LiveOp = MI.getOperand(1 - FoldIdx);
46166e41e3cSRoman Divacky
46266e41e3cSRoman Divacky if (FoldOp.getSubReg() || LiveOp.getSubReg())
4635ca98fd9SDimitry Andric return nullptr;
46466e41e3cSRoman Divacky
4651d5ae102SDimitry Andric Register FoldReg = FoldOp.getReg();
4661d5ae102SDimitry Andric Register LiveReg = LiveOp.getReg();
46766e41e3cSRoman Divacky
468e3b55780SDimitry Andric assert(FoldReg.isVirtual() && "Cannot fold physregs");
46966e41e3cSRoman Divacky
470044eb2f6SDimitry Andric const MachineRegisterInfo &MRI = MI.getMF()->getRegInfo();
47166e41e3cSRoman Divacky const TargetRegisterClass *RC = MRI.getRegClass(FoldReg);
47266e41e3cSRoman Divacky
473e3b55780SDimitry Andric if (LiveOp.getReg().isPhysical())
4745ca98fd9SDimitry Andric return RC->contains(LiveOp.getReg()) ? RC : nullptr;
47566e41e3cSRoman Divacky
47656fe8f14SDimitry Andric if (RC->hasSubClassEq(MRI.getRegClass(LiveReg)))
47766e41e3cSRoman Divacky return RC;
47866e41e3cSRoman Divacky
47966e41e3cSRoman Divacky // FIXME: Allow folding when register classes are memory compatible.
4805ca98fd9SDimitry Andric return nullptr;
48166e41e3cSRoman Divacky }
48266e41e3cSRoman Divacky
getNop() const483344a3780SDimitry Andric MCInst TargetInstrInfo::getNop() const { llvm_unreachable("Not implemented"); }
484344a3780SDimitry Andric
485344a3780SDimitry Andric std::pair<unsigned, unsigned>
getPatchpointUnfoldableRange(const MachineInstr & MI) const486344a3780SDimitry Andric TargetInstrInfo::getPatchpointUnfoldableRange(const MachineInstr &MI) const {
487344a3780SDimitry Andric switch (MI.getOpcode()) {
488344a3780SDimitry Andric case TargetOpcode::STACKMAP:
489344a3780SDimitry Andric // StackMapLiveValues are foldable
490344a3780SDimitry Andric return std::make_pair(0, StackMapOpers(&MI).getVarIdx());
491344a3780SDimitry Andric case TargetOpcode::PATCHPOINT:
492344a3780SDimitry Andric // For PatchPoint, the call args are not foldable (even if reported in the
493344a3780SDimitry Andric // stackmap e.g. via anyregcc).
494344a3780SDimitry Andric return std::make_pair(0, PatchPointOpers(&MI).getVarIdx());
495344a3780SDimitry Andric case TargetOpcode::STATEPOINT:
496344a3780SDimitry Andric // For statepoints, fold deopt and gc arguments, but not call arguments.
497344a3780SDimitry Andric return std::make_pair(MI.getNumDefs(), StatepointOpers(&MI).getVarIdx());
498344a3780SDimitry Andric default:
499344a3780SDimitry Andric llvm_unreachable("unexpected stackmap opcode");
500344a3780SDimitry Andric }
50167c32a98SDimitry Andric }
50267c32a98SDimitry Andric
/// Fold the live-value operands of the stackmap-family instruction \p MI
/// listed in \p Ops into indirect references through stack slot
/// \p FrameIndex. Returns a new, not-yet-inserted instruction, or nullptr if
/// any requested operand cannot be folded.
static MachineInstr *foldPatchpoint(MachineFunction &MF, MachineInstr &MI,
                                    ArrayRef<unsigned> Ops, int FrameIndex,
                                    const TargetInstrInfo &TII) {
  unsigned StartIdx = 0;
  unsigned NumDefs = 0;
  // getPatchpointUnfoldableRange hits llvm_unreachable if MI is not a
  // stackmap/patchpoint/statepoint.
  std::tie(NumDefs, StartIdx) = TII.getPatchpointUnfoldableRange(MI);

  // Sentinel value: "no def is being folded".
  unsigned DefToFoldIdx = MI.getNumOperands();

  // Return nullptr if any operands requested for folding are not foldable
  // (not part of the stackmap's live values), or are tied to another operand.
  for (unsigned Op : Ops) {
    if (Op < NumDefs) {
      assert(DefToFoldIdx == MI.getNumOperands() && "Folding multiple defs");
      DefToFoldIdx = Op;
    } else if (Op < StartIdx) {
      return nullptr;
    }
    if (MI.getOperand(Op).isTied())
      return nullptr;
  }

  MachineInstr *NewMI =
      MF.CreateMachineInstr(TII.get(MI.getOpcode()), MI.getDebugLoc(), true);
  MachineInstrBuilder MIB(MF, NewMI);

  // No need to fold return, the meta data, and function arguments: copy the
  // leading operands through verbatim, skipping the def being folded (if any).
  for (unsigned i = 0; i < StartIdx; ++i)
    if (i != DefToFoldIdx)
      MIB.add(MI.getOperand(i));

  for (unsigned i = StartIdx, e = MI.getNumOperands(); i < e; ++i) {
    MachineOperand &MO = MI.getOperand(i);
    // TiedTo stays e ("untied") unless isRegTiedToDefOperand sets it.
    unsigned TiedTo = e;
    (void)MI.isRegTiedToDefOperand(i, &TiedTo);

    if (is_contained(Ops, i)) {
      assert(TiedTo == e && "Cannot fold tied operands");
      unsigned SpillSize;
      unsigned SpillOffset;
      // Compute the spill slot size and offset.
      const TargetRegisterClass *RC =
          MF.getRegInfo().getRegClass(MO.getReg());
      bool Valid =
          TII.getStackSlotRange(RC, MO.getSubReg(), SpillSize, SpillOffset, MF);
      if (!Valid)
        report_fatal_error("cannot spill patchpoint subregister operand");
      // Emit the indirect reference: [IndirectMemRefOp, size, FI, offset].
      MIB.addImm(StackMaps::IndirectMemRefOp);
      MIB.addImm(SpillSize);
      MIB.addFrameIndex(FrameIndex);
      MIB.addImm(SpillOffset);
    } else {
      MIB.add(MO);
      if (TiedTo < e) {
        assert(TiedTo < NumDefs && "Bad tied operand");
        // If the folded def preceded the tied def, operand indices in NewMI
        // are shifted down by one.
        if (TiedTo > DefToFoldIdx)
          --TiedTo;
        NewMI->tieOperands(TiedTo, NewMI->getNumOperands() - 1);
      }
    }
  }
  return NewMI;
}
5675ca98fd9SDimitry Andric
/// Rewrite register operand \p OpNo of the INLINEASM instruction \p MI into
/// the target's memory-operand form referencing frame index \p FI, and retag
/// the preceding flag operand as a memory constraint.
static void foldInlineAsmMemOperand(MachineInstr *MI, unsigned OpNo, int FI,
                                    const TargetInstrInfo &TII) {
  // If the machine operand is tied, untie it first.
  if (MI->getOperand(OpNo).isTied()) {
    unsigned TiedTo = MI->findTiedOperandIdx(OpNo);
    MI->untieRegOperand(OpNo);
    // Intentional recursion! The other end of the (now broken) tie is folded
    // to the same frame index.
    foldInlineAsmMemOperand(MI, TiedTo, FI, TII);
  }

  // Replace the single register operand with the target-specific frame-index
  // operand sequence.
  SmallVector<MachineOperand, 5> NewOps;
  TII.getFrameIndexOperands(NewOps, FI);
  assert(!NewOps.empty() && "getFrameIndexOperands didn't create any operands");
  MI->removeOperand(OpNo);
  MI->insert(MI->operands_begin() + OpNo, NewOps);

  // Change the previous operand to a MemKind InlineAsm::Flag. The second param
  // is the per-target number of operands that represent the memory operand
  // excluding this one (MD). This includes MO.
  InlineAsm::Flag F(InlineAsm::Kind::Mem, NewOps.size());
  F.setMemConstraint(InlineAsm::ConstraintCode::m);
  MachineOperand &MD = MI->getOperand(OpNo - 1);
  MD.setImm(F);
}
592b1c73532SDimitry Andric
// Fold the register operand Ops[0] of the INLINEASM instruction MI into a
// memory reference to frame index FI, producing a duplicate of MI with the
// operand rewritten. Returns nullptr if not possible to fold.
static MachineInstr *foldInlineAsmMemOperand(MachineInstr &MI,
                                             ArrayRef<unsigned> Ops, int FI,
                                             const TargetInstrInfo &TII) {
  assert(MI.isInlineAsm() && "wrong opcode");
  // Only a single operand can be folded at a time.
  if (Ops.size() > 1)
    return nullptr;
  unsigned Op = Ops[0];
  assert(Op && "should never be first operand");
  assert(MI.getOperand(Op).isReg() && "shouldn't be folding non-reg operands");

  if (!MI.mayFoldInlineAsmRegOp(Op))
    return nullptr;

  // Fold into a duplicate of MI; the original instruction is left unchanged.
  MachineInstr &NewMI = TII.duplicate(*MI.getParent(), MI.getIterator(), MI);

  foldInlineAsmMemOperand(&NewMI, Op, FI, TII);

  // Update mayload/maystore metadata, and memoperands, according to whether
  // the folded register is read and/or written within the bundle.
  const VirtRegInfo &RI =
      AnalyzeVirtRegInBundle(MI, MI.getOperand(Op).getReg());
  MachineOperand &ExtraMO = NewMI.getOperand(InlineAsm::MIOp_ExtraInfo);
  MachineMemOperand::Flags Flags = MachineMemOperand::MONone;
  if (RI.Reads) {
    ExtraMO.setImm(ExtraMO.getImm() | InlineAsm::Extra_MayLoad);
    Flags |= MachineMemOperand::MOLoad;
  }
  if (RI.Writes) {
    ExtraMO.setImm(ExtraMO.getImm() | InlineAsm::Extra_MayStore);
    Flags |= MachineMemOperand::MOStore;
  }
  // Attach a memory operand describing the fixed stack slot.
  MachineFunction *MF = NewMI.getMF();
  const MachineFrameInfo &MFI = MF->getFrameInfo();
  MachineMemOperand *MMO = MF->getMachineMemOperand(
      MachinePointerInfo::getFixedStack(*MF, FI), Flags, MFI.getObjectSize(FI),
      MFI.getObjectAlign(FI));
  NewMI.addMemOperand(*MF, MMO);

  return &NewMI;
}
633b1c73532SDimitry Andric
/// Attempt to fold the register operands of \p MI listed in \p Ops into
/// references to stack slot \p FI. Handles stackmap-family instructions,
/// inline asm, target-specific folds, and finally plain copies (as a
/// store-to / load-from stack slot). Returns the folded instruction, or
/// nullptr if no fold was possible.
MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
                                                 ArrayRef<unsigned> Ops, int FI,
                                                 LiveIntervals *LIS,
                                                 VirtRegMap *VRM) const {
  // Folding a def means the slot is stored to; folding a use means it is
  // loaded from.
  auto Flags = MachineMemOperand::MONone;
  for (unsigned OpIdx : Ops)
    Flags |= MI.getOperand(OpIdx).isDef() ? MachineMemOperand::MOStore
                                          : MachineMemOperand::MOLoad;

  MachineBasicBlock *MBB = MI.getParent();
  assert(MBB && "foldMemoryOperand needs an inserted instruction");
  MachineFunction &MF = *MBB->getParent();

  // If we're not folding a load into a subreg, the size of the load is the
  // size of the spill slot. But if we are, we need to figure out what the
  // actual load size is.
  int64_t MemSize = 0;
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();

  if (Flags & MachineMemOperand::MOStore) {
    MemSize = MFI.getObjectSize(FI);
  } else {
    for (unsigned OpIdx : Ops) {
      int64_t OpSize = MFI.getObjectSize(FI);

      if (auto SubReg = MI.getOperand(OpIdx).getSubReg()) {
        // A subreg use loads only the subregister's bytes (when that size is
        // known and byte-sized).
        unsigned SubRegSize = TRI->getSubRegIdxSize(SubReg);
        if (SubRegSize > 0 && !(SubRegSize % 8))
          OpSize = SubRegSize / 8;
      }

      MemSize = std::max(MemSize, OpSize);
    }
  }

  assert(MemSize && "Did not expect a zero-sized stack slot");

  MachineInstr *NewMI = nullptr;

  if (MI.getOpcode() == TargetOpcode::STACKMAP ||
      MI.getOpcode() == TargetOpcode::PATCHPOINT ||
      MI.getOpcode() == TargetOpcode::STATEPOINT) {
    // Fold stackmap/patchpoint.
    NewMI = foldPatchpoint(MF, MI, Ops, FI, *this);
    if (NewMI)
      MBB->insert(MI, NewMI);
  } else if (MI.isInlineAsm()) {
    return foldInlineAsmMemOperand(MI, Ops, FI, *this);
  } else {
    // Ask the target to do the actual folding.
    NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, FI, LIS, VRM);
  }

  if (NewMI) {
    NewMI->setMemRefs(MF, MI.memoperands());
    // Add a memory operand, foldMemoryOperandImpl doesn't do that.
    assert((!(Flags & MachineMemOperand::MOStore) ||
            NewMI->mayStore()) &&
           "Folded a def to a non-store!");
    assert((!(Flags & MachineMemOperand::MOLoad) ||
            NewMI->mayLoad()) &&
           "Folded a use to a non-load!");
    assert(MFI.getObjectOffset(FI) != -1);
    MachineMemOperand *MMO =
        MF.getMachineMemOperand(MachinePointerInfo::getFixedStack(MF, FI),
                                Flags, MemSize, MFI.getObjectAlign(FI));
    NewMI->addMemOperand(MF, MMO);

    // The pass "x86 speculative load hardening" always attaches symbols to
    // call instructions. We need to copy them from the old instruction.
    NewMI->cloneInstrSymbols(MF, MI);

    return NewMI;
  }

  // Straight COPY may fold as load/store.
  if (!isCopyInstr(MI) || Ops.size() != 1)
    return nullptr;

  const TargetRegisterClass *RC = canFoldCopy(MI, *this, Ops[0]);
  if (!RC)
    return nullptr;

  // The non-folded COPY operand is the register being spilled or reloaded.
  const MachineOperand &MO = MI.getOperand(1 - Ops[0]);
  MachineBasicBlock::iterator Pos = MI;

  if (Flags == MachineMemOperand::MOStore)
    storeRegToStackSlot(*MBB, Pos, MO.getReg(), MO.isKill(), FI, RC, TRI,
                        Register());
  else
    loadRegFromStackSlot(*MBB, Pos, MO.getReg(), FI, RC, TRI, Register());
  // The spill/reload was inserted immediately before Pos.
  return &*--Pos;
}
728009b1c42SEd Schouten
foldMemoryOperand(MachineInstr & MI,ArrayRef<unsigned> Ops,MachineInstr & LoadMI,LiveIntervals * LIS) const729044eb2f6SDimitry Andric MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineInstr &MI,
730044eb2f6SDimitry Andric ArrayRef<unsigned> Ops,
731044eb2f6SDimitry Andric MachineInstr &LoadMI,
732044eb2f6SDimitry Andric LiveIntervals *LIS) const {
733044eb2f6SDimitry Andric assert(LoadMI.canFoldAsLoad() && "LoadMI isn't foldable!");
734044eb2f6SDimitry Andric #ifndef NDEBUG
735044eb2f6SDimitry Andric for (unsigned OpIdx : Ops)
736044eb2f6SDimitry Andric assert(MI.getOperand(OpIdx).isUse() && "Folding load into def!");
737044eb2f6SDimitry Andric #endif
738044eb2f6SDimitry Andric
739044eb2f6SDimitry Andric MachineBasicBlock &MBB = *MI.getParent();
740044eb2f6SDimitry Andric MachineFunction &MF = *MBB.getParent();
741044eb2f6SDimitry Andric
742044eb2f6SDimitry Andric // Ask the target to do the actual folding.
743044eb2f6SDimitry Andric MachineInstr *NewMI = nullptr;
744044eb2f6SDimitry Andric int FrameIndex = 0;
745044eb2f6SDimitry Andric
746044eb2f6SDimitry Andric if ((MI.getOpcode() == TargetOpcode::STACKMAP ||
747044eb2f6SDimitry Andric MI.getOpcode() == TargetOpcode::PATCHPOINT ||
748044eb2f6SDimitry Andric MI.getOpcode() == TargetOpcode::STATEPOINT) &&
749044eb2f6SDimitry Andric isLoadFromStackSlot(LoadMI, FrameIndex)) {
750044eb2f6SDimitry Andric // Fold stackmap/patchpoint.
751044eb2f6SDimitry Andric NewMI = foldPatchpoint(MF, MI, Ops, FrameIndex, *this);
752044eb2f6SDimitry Andric if (NewMI)
753044eb2f6SDimitry Andric NewMI = &*MBB.insert(MI, NewMI);
754b1c73532SDimitry Andric } else if (MI.isInlineAsm() && isLoadFromStackSlot(LoadMI, FrameIndex)) {
755b1c73532SDimitry Andric return foldInlineAsmMemOperand(MI, Ops, FrameIndex, *this);
756044eb2f6SDimitry Andric } else {
757044eb2f6SDimitry Andric // Ask the target to do the actual folding.
758044eb2f6SDimitry Andric NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI, LIS);
759044eb2f6SDimitry Andric }
760044eb2f6SDimitry Andric
761044eb2f6SDimitry Andric if (!NewMI)
762044eb2f6SDimitry Andric return nullptr;
763044eb2f6SDimitry Andric
764044eb2f6SDimitry Andric // Copy the memoperands from the load to the folded instruction.
765044eb2f6SDimitry Andric if (MI.memoperands_empty()) {
766d8e91e46SDimitry Andric NewMI->setMemRefs(MF, LoadMI.memoperands());
767044eb2f6SDimitry Andric } else {
768044eb2f6SDimitry Andric // Handle the rare case of folding multiple loads.
769d8e91e46SDimitry Andric NewMI->setMemRefs(MF, MI.memoperands());
770044eb2f6SDimitry Andric for (MachineInstr::mmo_iterator I = LoadMI.memoperands_begin(),
771044eb2f6SDimitry Andric E = LoadMI.memoperands_end();
772044eb2f6SDimitry Andric I != E; ++I) {
773044eb2f6SDimitry Andric NewMI->addMemOperand(MF, *I);
774044eb2f6SDimitry Andric }
775044eb2f6SDimitry Andric }
776044eb2f6SDimitry Andric return NewMI;
777044eb2f6SDimitry Andric }
778044eb2f6SDimitry Andric
7797fa27ce4SDimitry Andric /// transferImplicitOperands - MI is a pseudo-instruction, and the lowered
7807fa27ce4SDimitry Andric /// replacement instructions immediately precede it. Copy any implicit
7817fa27ce4SDimitry Andric /// operands from MI to the replacement instruction.
transferImplicitOperands(MachineInstr * MI,const TargetRegisterInfo * TRI)7827fa27ce4SDimitry Andric static void transferImplicitOperands(MachineInstr *MI,
7837fa27ce4SDimitry Andric const TargetRegisterInfo *TRI) {
7847fa27ce4SDimitry Andric MachineBasicBlock::iterator CopyMI = MI;
7857fa27ce4SDimitry Andric --CopyMI;
7867fa27ce4SDimitry Andric
7877fa27ce4SDimitry Andric Register DstReg = MI->getOperand(0).getReg();
7887fa27ce4SDimitry Andric for (const MachineOperand &MO : MI->implicit_operands()) {
7897fa27ce4SDimitry Andric CopyMI->addOperand(MO);
7907fa27ce4SDimitry Andric
7917fa27ce4SDimitry Andric // Be conservative about preserving kills when subregister defs are
7927fa27ce4SDimitry Andric // involved. If there was implicit kill of a super-register overlapping the
7937fa27ce4SDimitry Andric // copy result, we would kill the subregisters previous copies defined.
7947fa27ce4SDimitry Andric
7957fa27ce4SDimitry Andric if (MO.isKill() && TRI->regsOverlap(DstReg, MO.getReg()))
7967fa27ce4SDimitry Andric CopyMI->getOperand(CopyMI->getNumOperands() - 1).setIsKill(false);
7977fa27ce4SDimitry Andric }
7987fa27ce4SDimitry Andric }
7997fa27ce4SDimitry Andric
lowerCopy(MachineInstr * MI,const TargetRegisterInfo * TRI) const8007fa27ce4SDimitry Andric void TargetInstrInfo::lowerCopy(MachineInstr *MI,
8017fa27ce4SDimitry Andric const TargetRegisterInfo *TRI) const {
8027fa27ce4SDimitry Andric if (MI->allDefsAreDead()) {
8037fa27ce4SDimitry Andric MI->setDesc(get(TargetOpcode::KILL));
8047fa27ce4SDimitry Andric return;
8057fa27ce4SDimitry Andric }
8067fa27ce4SDimitry Andric
8077fa27ce4SDimitry Andric MachineOperand &DstMO = MI->getOperand(0);
8087fa27ce4SDimitry Andric MachineOperand &SrcMO = MI->getOperand(1);
8097fa27ce4SDimitry Andric
8107fa27ce4SDimitry Andric bool IdentityCopy = (SrcMO.getReg() == DstMO.getReg());
8117fa27ce4SDimitry Andric if (IdentityCopy || SrcMO.isUndef()) {
8127fa27ce4SDimitry Andric // No need to insert an identity copy instruction, but replace with a KILL
8137fa27ce4SDimitry Andric // if liveness is changed.
8147fa27ce4SDimitry Andric if (SrcMO.isUndef() || MI->getNumOperands() > 2) {
8157fa27ce4SDimitry Andric // We must make sure the super-register gets killed. Replace the
8167fa27ce4SDimitry Andric // instruction with KILL.
8177fa27ce4SDimitry Andric MI->setDesc(get(TargetOpcode::KILL));
8187fa27ce4SDimitry Andric return;
8197fa27ce4SDimitry Andric }
8207fa27ce4SDimitry Andric // Vanilla identity copy.
8217fa27ce4SDimitry Andric MI->eraseFromParent();
8227fa27ce4SDimitry Andric return;
8237fa27ce4SDimitry Andric }
8247fa27ce4SDimitry Andric
8257fa27ce4SDimitry Andric copyPhysReg(*MI->getParent(), MI, MI->getDebugLoc(), DstMO.getReg(),
8267fa27ce4SDimitry Andric SrcMO.getReg(), SrcMO.isKill());
8277fa27ce4SDimitry Andric
8287fa27ce4SDimitry Andric if (MI->getNumOperands() > 2)
8297fa27ce4SDimitry Andric transferImplicitOperands(MI, TRI);
8307fa27ce4SDimitry Andric MI->eraseFromParent();
8317fa27ce4SDimitry Andric }
8327fa27ce4SDimitry Andric
hasReassociableOperands(const MachineInstr & Inst,const MachineBasicBlock * MBB) const833dd58ef01SDimitry Andric bool TargetInstrInfo::hasReassociableOperands(
834dd58ef01SDimitry Andric const MachineInstr &Inst, const MachineBasicBlock *MBB) const {
835dd58ef01SDimitry Andric const MachineOperand &Op1 = Inst.getOperand(1);
836dd58ef01SDimitry Andric const MachineOperand &Op2 = Inst.getOperand(2);
837dd58ef01SDimitry Andric const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
838dd58ef01SDimitry Andric
839dd58ef01SDimitry Andric // We need virtual register definitions for the operands that we will
840dd58ef01SDimitry Andric // reassociate.
841dd58ef01SDimitry Andric MachineInstr *MI1 = nullptr;
842dd58ef01SDimitry Andric MachineInstr *MI2 = nullptr;
843e3b55780SDimitry Andric if (Op1.isReg() && Op1.getReg().isVirtual())
844dd58ef01SDimitry Andric MI1 = MRI.getUniqueVRegDef(Op1.getReg());
845e3b55780SDimitry Andric if (Op2.isReg() && Op2.getReg().isVirtual())
846dd58ef01SDimitry Andric MI2 = MRI.getUniqueVRegDef(Op2.getReg());
847dd58ef01SDimitry Andric
848e3b55780SDimitry Andric // And at least one operand must be defined in MBB.
849e3b55780SDimitry Andric return MI1 && MI2 && (MI1->getParent() == MBB || MI2->getParent() == MBB);
850e3b55780SDimitry Andric }
851e3b55780SDimitry Andric
areOpcodesEqualOrInverse(unsigned Opcode1,unsigned Opcode2) const852e3b55780SDimitry Andric bool TargetInstrInfo::areOpcodesEqualOrInverse(unsigned Opcode1,
853e3b55780SDimitry Andric unsigned Opcode2) const {
854e3b55780SDimitry Andric return Opcode1 == Opcode2 || getInverseOpcode(Opcode1) == Opcode2;
855dd58ef01SDimitry Andric }
856dd58ef01SDimitry Andric
/// Return true when \p Inst has a reassociable "sibling": the defining
/// instruction of one of its source operands that has the same (or inverse)
/// opcode, is itself associative/commutative (possibly inverted), has
/// reassociable operands, and whose result is used only by \p Inst.
/// \p Commuted is set when the sibling feeds the second source operand.
bool TargetInstrInfo::hasReassociableSibling(const MachineInstr &Inst,
                                             bool &Commuted) const {
  const MachineBasicBlock *MBB = Inst.getParent();
  const MachineRegisterInfo &MRI = MBB->getParent()->getRegInfo();
  // Callers (see isReassociationCandidate) have already verified
  // hasReassociableOperands, so both operands have unique virtual defs and
  // MI1/MI2 are non-null here.
  MachineInstr *MI1 = MRI.getUniqueVRegDef(Inst.getOperand(1).getReg());
  MachineInstr *MI2 = MRI.getUniqueVRegDef(Inst.getOperand(2).getReg());
  unsigned Opcode = Inst.getOpcode();

  // If only one operand has the same or inverse opcode and it's the second
  // source operand, the operands must be commuted.
  Commuted = !areOpcodesEqualOrInverse(Opcode, MI1->getOpcode()) &&
             areOpcodesEqualOrInverse(Opcode, MI2->getOpcode());
  if (Commuted)
    std::swap(MI1, MI2);

  // 1. The previous instruction must be the same type as Inst.
  // 2. The previous instruction must also be associative/commutative or be the
  //    inverse of such an operation (this can be different even for
  //    instructions with the same opcode if traits like fast-math-flags are
  //    included).
  // 3. The previous instruction must have virtual register definitions for its
  //    operands in the same basic block as Inst.
  // 4. The previous instruction's result must only be used by Inst.
  return areOpcodesEqualOrInverse(Opcode, MI1->getOpcode()) &&
         (isAssociativeAndCommutative(*MI1) ||
          isAssociativeAndCommutative(*MI1, /* Invert */ true)) &&
         hasReassociableOperands(*MI1, MBB) &&
         MRI.hasOneNonDBGUse(MI1->getOperand(0).getReg());
}
886dd58ef01SDimitry Andric
887e3b55780SDimitry Andric // 1. The operation must be associative and commutative or be the inverse of
888e3b55780SDimitry Andric // such an operation.
889dd58ef01SDimitry Andric // 2. The instruction must have virtual register definitions for its
890dd58ef01SDimitry Andric // operands in the same basic block.
891dd58ef01SDimitry Andric // 3. The instruction must have a reassociable sibling.
isReassociationCandidate(const MachineInstr & Inst,bool & Commuted) const892dd58ef01SDimitry Andric bool TargetInstrInfo::isReassociationCandidate(const MachineInstr &Inst,
893dd58ef01SDimitry Andric bool &Commuted) const {
894e3b55780SDimitry Andric return (isAssociativeAndCommutative(Inst) ||
895e3b55780SDimitry Andric isAssociativeAndCommutative(Inst, /* Invert */ true)) &&
896dd58ef01SDimitry Andric hasReassociableOperands(Inst, Inst.getParent()) &&
897dd58ef01SDimitry Andric hasReassociableSibling(Inst, Commuted);
898dd58ef01SDimitry Andric }
899dd58ef01SDimitry Andric
900dd58ef01SDimitry Andric // The concept of the reassociation pass is that these operations can benefit
901dd58ef01SDimitry Andric // from this kind of transformation:
902dd58ef01SDimitry Andric //
903dd58ef01SDimitry Andric // A = ? op ?
904dd58ef01SDimitry Andric // B = A op X (Prev)
905dd58ef01SDimitry Andric // C = B op Y (Root)
906dd58ef01SDimitry Andric // -->
907dd58ef01SDimitry Andric // A = ? op ?
908dd58ef01SDimitry Andric // B = X op Y
909dd58ef01SDimitry Andric // C = A op B
910dd58ef01SDimitry Andric //
911dd58ef01SDimitry Andric // breaking the dependency between A and B, allowing them to be executed in
912dd58ef01SDimitry Andric // parallel (or back-to-back in a pipeline) instead of depending on each other.
913dd58ef01SDimitry Andric
914dd58ef01SDimitry Andric // FIXME: This has the potential to be expensive (compile time) while not
915dd58ef01SDimitry Andric // improving the code at all. Some ways to limit the overhead:
916dd58ef01SDimitry Andric // 1. Track successful transforms; bail out if hit rate gets too low.
917dd58ef01SDimitry Andric // 2. Only enable at -O3 or some other non-default optimization level.
918dd58ef01SDimitry Andric // 3. Pre-screen pattern candidates here: if an operand of the previous
919dd58ef01SDimitry Andric // instruction is known to not increase the critical path, then don't match
920dd58ef01SDimitry Andric // that pattern.
getMachineCombinerPatterns(MachineInstr & Root,SmallVectorImpl<unsigned> & Patterns,bool DoRegPressureReduce) const921dd58ef01SDimitry Andric bool TargetInstrInfo::getMachineCombinerPatterns(
922ac9a064cSDimitry Andric MachineInstr &Root, SmallVectorImpl<unsigned> &Patterns,
923b60736ecSDimitry Andric bool DoRegPressureReduce) const {
924dd58ef01SDimitry Andric bool Commute;
925dd58ef01SDimitry Andric if (isReassociationCandidate(Root, Commute)) {
926dd58ef01SDimitry Andric // We found a sequence of instructions that may be suitable for a
927dd58ef01SDimitry Andric // reassociation of operands to increase ILP. Specify each commutation
928dd58ef01SDimitry Andric // possibility for the Prev instruction in the sequence and let the
929dd58ef01SDimitry Andric // machine combiner decide if changing the operands is worthwhile.
930dd58ef01SDimitry Andric if (Commute) {
931dd58ef01SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_AX_YB);
932dd58ef01SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_XA_YB);
933dd58ef01SDimitry Andric } else {
934dd58ef01SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_AX_BY);
935dd58ef01SDimitry Andric Patterns.push_back(MachineCombinerPattern::REASSOC_XA_BY);
936dd58ef01SDimitry Andric }
937dd58ef01SDimitry Andric return true;
938dd58ef01SDimitry Andric }
939dd58ef01SDimitry Andric
940dd58ef01SDimitry Andric return false;
941dd58ef01SDimitry Andric }
942044eb2f6SDimitry Andric
/// Return true when a code sequence can improve loop throughput.
/// Base implementation: no pattern is classified as a throughput pattern.
bool TargetInstrInfo::isThroughputPattern(unsigned Pattern) const {
  return false;
}
947044eb2f6SDimitry Andric
/// Return the objective the machine combiner should optimize for when
/// evaluating \p Pattern. Base implementation: every pattern gets the
/// default objective.
CombinerObjective
TargetInstrInfo::getCombinerObjective(unsigned Pattern) const {
  return CombinerObjective::Default;
}
952ac9a064cSDimitry Andric
/// Select the opcode pair to use for the two instructions produced by
/// reassociating \p Root with \p Prev according to \p Pattern, accounting
/// for inverse opcodes when one of the instructions is not both associative
/// and commutative. The rewrite rules are listed inline below.
std::pair<unsigned, unsigned>
TargetInstrInfo::getReassociationOpcodes(unsigned Pattern,
                                         const MachineInstr &Root,
                                         const MachineInstr &Prev) const {
  bool AssocCommutRoot = isAssociativeAndCommutative(Root);
  bool AssocCommutPrev = isAssociativeAndCommutative(Prev);

  // Early exit if both opcodes are associative and commutative. It's a trivial
  // reassociation when we only change operands order. In this case opcodes are
  // not required to have inverse versions.
  if (AssocCommutRoot && AssocCommutPrev) {
    assert(Root.getOpcode() == Prev.getOpcode() && "Expected to be equal");
    return std::make_pair(Root.getOpcode(), Root.getOpcode());
  }

  // At least one instruction is not associative or commutative.
  // Since we have matched one of the reassociation patterns, we expect that the
  // instructions' opcodes are equal or one of them is the inversion of the
  // other.
  assert(areOpcodesEqualOrInverse(Root.getOpcode(), Prev.getOpcode()) &&
         "Incorrectly matched pattern");
  unsigned AssocCommutOpcode = Root.getOpcode();
  unsigned InverseOpcode = *getInverseOpcode(Root.getOpcode());
  if (!AssocCommutRoot)
    std::swap(AssocCommutOpcode, InverseOpcode);

  // The transformation rule (`+` is any associative and commutative binary
  // operation, `-` is the inverse):
  // REASSOC_AX_BY:
  //   (A + X) + Y => A + (X + Y)
  //   (A + X) - Y => A + (X - Y)
  //   (A - X) + Y => A - (X - Y)
  //   (A - X) - Y => A - (X + Y)
  // REASSOC_XA_BY:
  //   (X + A) + Y => (X + Y) + A
  //   (X + A) - Y => (X - Y) + A
  //   (X - A) + Y => (X + Y) - A
  //   (X - A) - Y => (X - Y) - A
  // REASSOC_AX_YB:
  //   Y + (A + X) => (Y + X) + A
  //   Y - (A + X) => (Y - X) - A
  //   Y + (A - X) => (Y - X) + A
  //   Y - (A - X) => (Y + X) - A
  // REASSOC_XA_YB:
  //   Y + (X + A) => (Y + X) + A
  //   Y - (X + A) => (Y - X) - A
  //   Y + (X - A) => (Y + X) - A
  //   Y - (X - A) => (Y - X) + A
  switch (Pattern) {
  default:
    llvm_unreachable("Unexpected pattern");
  case MachineCombinerPattern::REASSOC_AX_BY:
    if (!AssocCommutRoot && AssocCommutPrev)
      return {AssocCommutOpcode, InverseOpcode};
    if (AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, InverseOpcode};
    if (!AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, AssocCommutOpcode};
    break;
  case MachineCombinerPattern::REASSOC_XA_BY:
    if (!AssocCommutRoot && AssocCommutPrev)
      return {AssocCommutOpcode, InverseOpcode};
    if (AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, AssocCommutOpcode};
    if (!AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, InverseOpcode};
    break;
  case MachineCombinerPattern::REASSOC_AX_YB:
    if (!AssocCommutRoot && AssocCommutPrev)
      return {InverseOpcode, InverseOpcode};
    if (AssocCommutRoot && !AssocCommutPrev)
      return {AssocCommutOpcode, InverseOpcode};
    if (!AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, AssocCommutOpcode};
    break;
  case MachineCombinerPattern::REASSOC_XA_YB:
    if (!AssocCommutRoot && AssocCommutPrev)
      return {InverseOpcode, InverseOpcode};
    if (AssocCommutRoot && !AssocCommutPrev)
      return {InverseOpcode, AssocCommutOpcode};
    if (!AssocCommutRoot && !AssocCommutPrev)
      return {AssocCommutOpcode, InverseOpcode};
    break;
  }
  llvm_unreachable("Unhandled combination");
}
1039e3b55780SDimitry Andric
1040e3b55780SDimitry Andric // Return a pair of boolean flags showing if the new root and new prev operands
1041e3b55780SDimitry Andric // must be swapped. See visual example of the rule in
1042e3b55780SDimitry Andric // TargetInstrInfo::getReassociationOpcodes.
mustSwapOperands(unsigned Pattern)1043ac9a064cSDimitry Andric static std::pair<bool, bool> mustSwapOperands(unsigned Pattern) {
1044e3b55780SDimitry Andric switch (Pattern) {
1045e3b55780SDimitry Andric default:
1046e3b55780SDimitry Andric llvm_unreachable("Unexpected pattern");
1047e3b55780SDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY:
1048e3b55780SDimitry Andric return {false, false};
1049e3b55780SDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY:
1050e3b55780SDimitry Andric return {true, false};
1051e3b55780SDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB:
1052e3b55780SDimitry Andric return {true, true};
1053e3b55780SDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB:
1054e3b55780SDimitry Andric return {true, true};
1055e3b55780SDimitry Andric }
1056e3b55780SDimitry Andric }
1057e3b55780SDimitry Andric
getReassociateOperandIndices(const MachineInstr & Root,unsigned Pattern,std::array<unsigned,5> & OperandIndices) const1058ac9a064cSDimitry Andric void TargetInstrInfo::getReassociateOperandIndices(
1059ac9a064cSDimitry Andric const MachineInstr &Root, unsigned Pattern,
1060ac9a064cSDimitry Andric std::array<unsigned, 5> &OperandIndices) const {
1061ac9a064cSDimitry Andric switch (Pattern) {
1062ac9a064cSDimitry Andric case MachineCombinerPattern::REASSOC_AX_BY:
1063ac9a064cSDimitry Andric OperandIndices = {1, 1, 1, 2, 2};
1064ac9a064cSDimitry Andric break;
1065ac9a064cSDimitry Andric case MachineCombinerPattern::REASSOC_AX_YB:
1066ac9a064cSDimitry Andric OperandIndices = {2, 1, 2, 2, 1};
1067ac9a064cSDimitry Andric break;
1068ac9a064cSDimitry Andric case MachineCombinerPattern::REASSOC_XA_BY:
1069ac9a064cSDimitry Andric OperandIndices = {1, 2, 1, 1, 2};
1070ac9a064cSDimitry Andric break;
1071ac9a064cSDimitry Andric case MachineCombinerPattern::REASSOC_XA_YB:
1072ac9a064cSDimitry Andric OperandIndices = {2, 2, 2, 1, 1};
1073ac9a064cSDimitry Andric break;
1074ac9a064cSDimitry Andric default:
1075ac9a064cSDimitry Andric llvm_unreachable("unexpected MachineCombinerPattern");
1076ac9a064cSDimitry Andric }
1077ac9a064cSDimitry Andric }
1078ac9a064cSDimitry Andric
1079dd58ef01SDimitry Andric /// Attempt the reassociation transformation to reduce critical path length.
1080dd58ef01SDimitry Andric /// See the above comments before getMachineCombinerPatterns().
/// Attempt the reassociation transformation to reduce critical path length.
/// See the above comments before getMachineCombinerPatterns().
///
/// The operands selected by OperandIndices play the roles A/B/X/Y of the
/// pattern; the new instructions are built here and handed back to the
/// MachineCombiner via InsInstrs/DelInstrs — nothing is inserted or erased
/// directly in this function.
void TargetInstrInfo::reassociateOps(
    MachineInstr &Root, MachineInstr &Prev, unsigned Pattern,
    SmallVectorImpl<MachineInstr *> &InsInstrs,
    SmallVectorImpl<MachineInstr *> &DelInstrs,
    ArrayRef<unsigned> OperandIndices,
    DenseMap<unsigned, unsigned> &InstrIdxForVirtReg) const {
  MachineFunction *MF = Root.getMF();
  MachineRegisterInfo &MRI = MF->getRegInfo();
  const TargetInstrInfo *TII = MF->getSubtarget().getInstrInfo();
  const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
  const TargetRegisterClass *RC = Root.getRegClassConstraint(0, TII, TRI);

  // Pick the pattern's A/B/X/Y operands out of Prev and Root using the
  // indices the target reported via getReassociateOperandIndices.
  MachineOperand &OpA = Prev.getOperand(OperandIndices[1]);
  MachineOperand &OpB = Root.getOperand(OperandIndices[2]);
  MachineOperand &OpX = Prev.getOperand(OperandIndices[3]);
  MachineOperand &OpY = Root.getOperand(OperandIndices[4]);
  MachineOperand &OpC = Root.getOperand(0);

  Register RegA = OpA.getReg();
  Register RegB = OpB.getReg();
  Register RegX = OpX.getReg();
  Register RegY = OpY.getReg();
  Register RegC = OpC.getReg();

  // All virtual registers involved must be usable in Root's def register
  // class, since operands change which instruction they feed.
  if (RegA.isVirtual())
    MRI.constrainRegClass(RegA, RC);
  if (RegB.isVirtual())
    MRI.constrainRegClass(RegB, RC);
  if (RegX.isVirtual())
    MRI.constrainRegClass(RegX, RC);
  if (RegY.isVirtual())
    MRI.constrainRegClass(RegY, RC);
  if (RegC.isVirtual())
    MRI.constrainRegClass(RegC, RC);

  // Create a new virtual register for the result of (X op Y) instead of
  // recycling RegB because the MachineCombiner's computation of the critical
  // path requires a new register definition rather than an existing one.
  Register NewVR = MRI.createVirtualRegister(RC);
  InstrIdxForVirtReg.insert(std::make_pair(NewVR, 0));

  auto [NewRootOpc, NewPrevOpc] = getReassociationOpcodes(Pattern, Root, Prev);
  bool KillA = OpA.isKill();
  bool KillX = OpX.isKill();
  bool KillY = OpY.isKill();
  // The value defined by the new prev instruction is consumed by the new
  // root instruction, so its single use is a kill.
  bool KillNewVR = true;

  auto [SwapRootOperands, SwapPrevOperands] = mustSwapOperands(Pattern);

  if (SwapPrevOperands) {
    std::swap(RegX, RegY);
    std::swap(KillX, KillY);
  }

  // Map the pattern roles back onto concrete operand slots of the two new
  // instructions; the slot assignment depends on the pattern variant.
  unsigned PrevFirstOpIdx, PrevSecondOpIdx;
  unsigned RootFirstOpIdx, RootSecondOpIdx;
  switch (Pattern) {
  case MachineCombinerPattern::REASSOC_AX_BY:
    PrevFirstOpIdx = OperandIndices[1];
    PrevSecondOpIdx = OperandIndices[3];
    RootFirstOpIdx = OperandIndices[2];
    RootSecondOpIdx = OperandIndices[4];
    break;
  case MachineCombinerPattern::REASSOC_AX_YB:
    PrevFirstOpIdx = OperandIndices[1];
    PrevSecondOpIdx = OperandIndices[3];
    RootFirstOpIdx = OperandIndices[4];
    RootSecondOpIdx = OperandIndices[2];
    break;
  case MachineCombinerPattern::REASSOC_XA_BY:
    PrevFirstOpIdx = OperandIndices[3];
    PrevSecondOpIdx = OperandIndices[1];
    RootFirstOpIdx = OperandIndices[2];
    RootSecondOpIdx = OperandIndices[4];
    break;
  case MachineCombinerPattern::REASSOC_XA_YB:
    PrevFirstOpIdx = OperandIndices[3];
    PrevSecondOpIdx = OperandIndices[1];
    RootFirstOpIdx = OperandIndices[4];
    RootSecondOpIdx = OperandIndices[2];
    break;
  default:
    llvm_unreachable("unexpected MachineCombinerPattern");
  }

  // Basically BuildMI but doesn't add implicit operands by default.
  auto buildMINoImplicit = [](MachineFunction &MF, const MIMetadata &MIMD,
                              const MCInstrDesc &MCID, Register DestReg) {
    return MachineInstrBuilder(
               MF, MF.CreateMachineInstr(MCID, MIMD.getDL(), /*NoImpl=*/true))
        .setPCSections(MIMD.getPCSections())
        .addReg(DestReg, RegState::Define);
  };

  // Create new instructions for insertion. Operands other than the two being
  // reassociated are copied over verbatim from the original instruction.
  MachineInstrBuilder MIB1 =
      buildMINoImplicit(*MF, MIMetadata(Prev), TII->get(NewPrevOpc), NewVR);
  for (const auto &MO : Prev.explicit_operands()) {
    unsigned Idx = MO.getOperandNo();
    // Skip the result operand we'd already added.
    if (Idx == 0)
      continue;
    if (Idx == PrevFirstOpIdx)
      MIB1.addReg(RegX, getKillRegState(KillX));
    else if (Idx == PrevSecondOpIdx)
      MIB1.addReg(RegY, getKillRegState(KillY));
    else
      MIB1.add(MO);
  }
  MIB1.copyImplicitOps(Prev);

  if (SwapRootOperands) {
    std::swap(RegA, NewVR);
    std::swap(KillA, KillNewVR);
  }

  MachineInstrBuilder MIB2 =
      buildMINoImplicit(*MF, MIMetadata(Root), TII->get(NewRootOpc), RegC);
  for (const auto &MO : Root.explicit_operands()) {
    unsigned Idx = MO.getOperandNo();
    // Skip the result operand.
    if (Idx == 0)
      continue;
    if (Idx == RootFirstOpIdx)
      MIB2 = MIB2.addReg(RegA, getKillRegState(KillA));
    else if (Idx == RootSecondOpIdx)
      MIB2 = MIB2.addReg(NewVR, getKillRegState(KillNewVR));
    else
      MIB2 = MIB2.add(MO);
  }
  MIB2.copyImplicitOps(Root);

  // Propagate FP flags from the original instructions.
  // But clear poison-generating flags because those may not be valid now.
  // TODO: There should be a helper function for copying only fast-math-flags.
  uint32_t IntersectedFlags = Root.getFlags() & Prev.getFlags();
  MIB1->setFlags(IntersectedFlags);
  MIB1->clearFlag(MachineInstr::MIFlag::NoSWrap);
  MIB1->clearFlag(MachineInstr::MIFlag::NoUWrap);
  MIB1->clearFlag(MachineInstr::MIFlag::IsExact);

  MIB2->setFlags(IntersectedFlags);
  MIB2->clearFlag(MachineInstr::MIFlag::NoSWrap);
  MIB2->clearFlag(MachineInstr::MIFlag::NoUWrap);
  MIB2->clearFlag(MachineInstr::MIFlag::IsExact);

  // Let the target copy any extra operand attributes it tracks.
  setSpecialOperandAttr(Root, Prev, *MIB1, *MIB2);

  // Record new instructions for insertion and old instructions for deletion.
  InsInstrs.push_back(MIB1);
  InsInstrs.push_back(MIB2);
  DelInstrs.push_back(&Prev);
  DelInstrs.push_back(&Root);

  // We transformed:
  // B = A op X (Prev)
  // C = B op Y (Root)
  // Into:
  // B = X op Y (MIB1)
  // C = A op B (MIB2)
  // C has the same value as before, B doesn't; as such, keep the debug number
  // of C but not of B.
  if (unsigned OldRootNum = Root.peekDebugInstrNum())
    MIB2.getInstr()->setDebugInstrNum(OldRootNum);
}
1246dd58ef01SDimitry Andric
genAlternativeCodeSequence(MachineInstr & Root,unsigned Pattern,SmallVectorImpl<MachineInstr * > & InsInstrs,SmallVectorImpl<MachineInstr * > & DelInstrs,DenseMap<unsigned,unsigned> & InstIdxForVirtReg) const1247dd58ef01SDimitry Andric void TargetInstrInfo::genAlternativeCodeSequence(
1248ac9a064cSDimitry Andric MachineInstr &Root, unsigned Pattern,
1249dd58ef01SDimitry Andric SmallVectorImpl<MachineInstr *> &InsInstrs,
1250dd58ef01SDimitry Andric SmallVectorImpl<MachineInstr *> &DelInstrs,
1251dd58ef01SDimitry Andric DenseMap<unsigned, unsigned> &InstIdxForVirtReg) const {
1252044eb2f6SDimitry Andric MachineRegisterInfo &MRI = Root.getMF()->getRegInfo();
1253dd58ef01SDimitry Andric
1254dd58ef01SDimitry Andric // Select the previous instruction in the sequence based on the input pattern.
1255ac9a064cSDimitry Andric std::array<unsigned, 5> OperandIndices;
1256ac9a064cSDimitry Andric getReassociateOperandIndices(Root, Pattern, OperandIndices);
1257ac9a064cSDimitry Andric MachineInstr *Prev =
1258ac9a064cSDimitry Andric MRI.getUniqueVRegDef(Root.getOperand(OperandIndices[0]).getReg());
1259dd58ef01SDimitry Andric
1260e3b55780SDimitry Andric // Don't reassociate if Prev and Root are in different blocks.
1261e3b55780SDimitry Andric if (Prev->getParent() != Root.getParent())
1262e3b55780SDimitry Andric return;
1263e3b55780SDimitry Andric
1264ac9a064cSDimitry Andric reassociateOps(Root, *Prev, Pattern, InsInstrs, DelInstrs, OperandIndices,
1265ac9a064cSDimitry Andric InstIdxForVirtReg);
1266dd58ef01SDimitry Andric }
1267dd58ef01SDimitry Andric
// Default trace strategy for the MachineCombiner: minimize instruction count.
// Targets can override this hook to pick a different strategy.
MachineTraceStrategy TargetInstrInfo::getMachineCombinerTraceStrategy() const {
  return MachineTraceStrategy::TS_MinInstrCount;
}
12717fa27ce4SDimitry Andric
/// Conservative, target-independent test of whether MI's value can be safely
/// recomputed ("rematerialized") at a different program point. To qualify, MI
/// must define exactly one register, have no side effects, read no memory
/// that may change, and use no virtual registers and no non-constant
/// physical registers.
bool TargetInstrInfo::isReallyTriviallyReMaterializable(
    const MachineInstr &MI) const {
  const MachineFunction &MF = *MI.getMF();
  const MachineRegisterInfo &MRI = MF.getRegInfo();

  // Remat clients assume operand 0 is the defined register.
  if (!MI.getNumOperands() || !MI.getOperand(0).isReg())
    return false;
  Register DefReg = MI.getOperand(0).getReg();

  // A sub-register definition can only be rematerialized if the instruction
  // doesn't read the other parts of the register. Otherwise it is really a
  // read-modify-write operation on the full virtual register which cannot be
  // moved safely.
  if (DefReg.isVirtual() && MI.getOperand(0).getSubReg() &&
      MI.readsVirtualRegister(DefReg))
    return false;

  // A load from a fixed stack slot can be rematerialized. This may be
  // redundant with subsequent checks, but it's target-independent,
  // simple, and a common case.
  int FrameIdx = 0;
  if (isLoadFromStackSlot(MI, FrameIdx) &&
      MF.getFrameInfo().isImmutableObjectIndex(FrameIdx))
    return true;

  // Avoid instructions obviously unsafe for remat.
  if (MI.isNotDuplicable() || MI.mayStore() || MI.mayRaiseFPException() ||
      MI.hasUnmodeledSideEffects())
    return false;

  // Don't remat inline asm. We have no idea how expensive it is
  // even if it's side effect free.
  if (MI.isInlineAsm())
    return false;

  // Avoid instructions which load from potentially varying memory.
  if (MI.mayLoad() && !MI.isDereferenceableInvariantLoad())
    return false;

  // If any of the registers accessed are non-constant, conservatively assume
  // the instruction is not rematerializable.
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg()) continue;
    Register Reg = MO.getReg();
    if (Reg == 0)
      continue;

    // Check for a well-behaved physical register.
    if (Reg.isPhysical()) {
      if (MO.isUse()) {
        // If the physreg has no defs anywhere, it's just an ambient register
        // and we can freely move its uses. Alternatively, if it's allocatable,
        // it could get allocated to something with a def during allocation.
        if (!MRI.isConstantPhysReg(Reg))
          return false;
      } else {
        // A physreg def. We can't remat it.
        return false;
      }
      continue;
    }

    // Only allow one virtual-register def. There may be multiple defs of the
    // same virtual register, though.
    if (MO.isDef() && Reg != DefReg)
      return false;

    // Don't allow any virtual-register uses. Rematting an instruction with
    // virtual register uses would lengthen the live ranges of the uses, which
    // is not necessarily a good idea, certainly not "trivial".
    if (MO.isUse())
      return false;
  }

  // Everything checked out.
  return true;
}
135066e41e3cSRoman Divacky
getSPAdjust(const MachineInstr & MI) const135101095a5dSDimitry Andric int TargetInstrInfo::getSPAdjust(const MachineInstr &MI) const {
1352044eb2f6SDimitry Andric const MachineFunction *MF = MI.getMF();
135367c32a98SDimitry Andric const TargetFrameLowering *TFI = MF->getSubtarget().getFrameLowering();
135467c32a98SDimitry Andric bool StackGrowsDown =
135567c32a98SDimitry Andric TFI->getStackGrowthDirection() == TargetFrameLowering::StackGrowsDown;
135667c32a98SDimitry Andric
13575a5ac124SDimitry Andric unsigned FrameSetupOpcode = getCallFrameSetupOpcode();
13585a5ac124SDimitry Andric unsigned FrameDestroyOpcode = getCallFrameDestroyOpcode();
135967c32a98SDimitry Andric
136071d5a254SDimitry Andric if (!isFrameInstr(MI))
136167c32a98SDimitry Andric return 0;
136267c32a98SDimitry Andric
136371d5a254SDimitry Andric int SPAdj = TFI->alignSPAdjust(getFrameSize(MI));
136467c32a98SDimitry Andric
136501095a5dSDimitry Andric if ((!StackGrowsDown && MI.getOpcode() == FrameSetupOpcode) ||
136601095a5dSDimitry Andric (StackGrowsDown && MI.getOpcode() == FrameDestroyOpcode))
136767c32a98SDimitry Andric SPAdj = -SPAdj;
136867c32a98SDimitry Andric
136967c32a98SDimitry Andric return SPAdj;
137067c32a98SDimitry Andric }
137167c32a98SDimitry Andric
137266e41e3cSRoman Divacky /// isSchedulingBoundary - Test if the given instruction should be
137366e41e3cSRoman Divacky /// considered a scheduling boundary. This primarily includes labels
137466e41e3cSRoman Divacky /// and terminators.
isSchedulingBoundary(const MachineInstr & MI,const MachineBasicBlock * MBB,const MachineFunction & MF) const137501095a5dSDimitry Andric bool TargetInstrInfo::isSchedulingBoundary(const MachineInstr &MI,
137666e41e3cSRoman Divacky const MachineBasicBlock *MBB,
137766e41e3cSRoman Divacky const MachineFunction &MF) const {
137866e41e3cSRoman Divacky // Terminators and labels can't be scheduled around.
137901095a5dSDimitry Andric if (MI.isTerminator() || MI.isPosition())
138066e41e3cSRoman Divacky return true;
138166e41e3cSRoman Divacky
1382cfca06d7SDimitry Andric // INLINEASM_BR can jump to another block
1383cfca06d7SDimitry Andric if (MI.getOpcode() == TargetOpcode::INLINEASM_BR)
1384cfca06d7SDimitry Andric return true;
1385cfca06d7SDimitry Andric
138666e41e3cSRoman Divacky // Don't attempt to schedule around any instruction that defines
138766e41e3cSRoman Divacky // a stack-oriented pointer, as it's unlikely to be profitable. This
138866e41e3cSRoman Divacky // saves compile time, because it doesn't require every single
138966e41e3cSRoman Divacky // stack slot reference to depend on the instruction that does the
139066e41e3cSRoman Divacky // modification.
139167c32a98SDimitry Andric const TargetLowering &TLI = *MF.getSubtarget().getTargetLowering();
139267c32a98SDimitry Andric const TargetRegisterInfo *TRI = MF.getSubtarget().getRegisterInfo();
139301095a5dSDimitry Andric return MI.modifiesRegister(TLI.getStackPointerRegisterToSaveRestore(), TRI);
139466e41e3cSRoman Divacky }
139566e41e3cSRoman Divacky
1396cf099d11SDimitry Andric // Provide a global flag for disabling the PreRA hazard recognizer that targets
1397cf099d11SDimitry Andric // may choose to honor.
bool TargetInstrInfo::usePreRAHazardRecognizer() const {
  // Honors the file-level DisableHazardRecognizer flag, which presumably is
  // set from a command-line option earlier in this file — TODO confirm.
  return !DisableHazardRecognizer;
}
1401cf099d11SDimitry Andric
1402cf099d11SDimitry Andric // Default implementation of CreateTargetRAHazardRecognizer.
// Default pre-RA hazard recognizer: a dummy recognizer that reports no
// hazards, i.e. allows every instruction to issue.
ScheduleHazardRecognizer *TargetInstrInfo::
CreateTargetHazardRecognizer(const TargetSubtargetInfo *STI,
                             const ScheduleDAG *DAG) const {
  // Dummy hazard recognizer allows all instructions to issue.
  return new ScheduleHazardRecognizer();
}
1409cf099d11SDimitry Andric
141058b69754SDimitry Andric // Default implementation of CreateTargetMIHazardRecognizer.
// Default implementation of CreateTargetMIHazardRecognizer: an
// itinerary-driven scoreboard tagged for the machine scheduler.
ScheduleHazardRecognizer *TargetInstrInfo::CreateTargetMIHazardRecognizer(
    const InstrItineraryData *II, const ScheduleDAGMI *DAG) const {
  return new ScoreboardHazardRecognizer(II, DAG, "machine-scheduler");
}
141558b69754SDimitry Andric
141666e41e3cSRoman Divacky // Default implementation of CreateTargetPostRAHazardRecognizer.
// Default post-RA hazard recognizer: an itinerary-driven scoreboard tagged
// for the post-RA scheduler.
ScheduleHazardRecognizer *TargetInstrInfo::
CreateTargetPostRAHazardRecognizer(const InstrItineraryData *II,
                                   const ScheduleDAG *DAG) const {
  return new ScoreboardHazardRecognizer(II, DAG, "post-RA-sched");
}
142263faed5bSDimitry Andric
1423cfca06d7SDimitry Andric // Default implementation of getMemOperandWithOffset.
getMemOperandWithOffset(const MachineInstr & MI,const MachineOperand * & BaseOp,int64_t & Offset,bool & OffsetIsScalable,const TargetRegisterInfo * TRI) const1424cfca06d7SDimitry Andric bool TargetInstrInfo::getMemOperandWithOffset(
1425cfca06d7SDimitry Andric const MachineInstr &MI, const MachineOperand *&BaseOp, int64_t &Offset,
1426cfca06d7SDimitry Andric bool &OffsetIsScalable, const TargetRegisterInfo *TRI) const {
1427cfca06d7SDimitry Andric SmallVector<const MachineOperand *, 4> BaseOps;
1428ac9a064cSDimitry Andric LocationSize Width = 0;
1429cfca06d7SDimitry Andric if (!getMemOperandsWithOffsetWidth(MI, BaseOps, Offset, OffsetIsScalable,
1430cfca06d7SDimitry Andric Width, TRI) ||
1431cfca06d7SDimitry Andric BaseOps.size() != 1)
1432cfca06d7SDimitry Andric return false;
1433cfca06d7SDimitry Andric BaseOp = BaseOps.front();
1434cfca06d7SDimitry Andric return true;
1435cfca06d7SDimitry Andric }
1436cfca06d7SDimitry Andric
143758b69754SDimitry Andric //===----------------------------------------------------------------------===//
143858b69754SDimitry Andric // SelectionDAG latency interface.
143958b69754SDimitry Andric //===----------------------------------------------------------------------===//
144058b69754SDimitry Andric
1441b1c73532SDimitry Andric std::optional<unsigned>
getOperandLatency(const InstrItineraryData * ItinData,SDNode * DefNode,unsigned DefIdx,SDNode * UseNode,unsigned UseIdx) const14424a16efa3SDimitry Andric TargetInstrInfo::getOperandLatency(const InstrItineraryData *ItinData,
144363faed5bSDimitry Andric SDNode *DefNode, unsigned DefIdx,
144463faed5bSDimitry Andric SDNode *UseNode, unsigned UseIdx) const {
144563faed5bSDimitry Andric if (!ItinData || ItinData->isEmpty())
1446b1c73532SDimitry Andric return std::nullopt;
144763faed5bSDimitry Andric
144863faed5bSDimitry Andric if (!DefNode->isMachineOpcode())
1449b1c73532SDimitry Andric return std::nullopt;
145063faed5bSDimitry Andric
145163faed5bSDimitry Andric unsigned DefClass = get(DefNode->getMachineOpcode()).getSchedClass();
145263faed5bSDimitry Andric if (!UseNode->isMachineOpcode())
145363faed5bSDimitry Andric return ItinData->getOperandCycle(DefClass, DefIdx);
145463faed5bSDimitry Andric unsigned UseClass = get(UseNode->getMachineOpcode()).getSchedClass();
145563faed5bSDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
145663faed5bSDimitry Andric }
145763faed5bSDimitry Andric
getInstrLatency(const InstrItineraryData * ItinData,SDNode * N) const1458b1c73532SDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
145963faed5bSDimitry Andric SDNode *N) const {
146063faed5bSDimitry Andric if (!ItinData || ItinData->isEmpty())
146163faed5bSDimitry Andric return 1;
146263faed5bSDimitry Andric
146363faed5bSDimitry Andric if (!N->isMachineOpcode())
146463faed5bSDimitry Andric return 1;
146563faed5bSDimitry Andric
146663faed5bSDimitry Andric return ItinData->getStageLatency(get(N->getMachineOpcode()).getSchedClass());
146763faed5bSDimitry Andric }
146863faed5bSDimitry Andric
146958b69754SDimitry Andric //===----------------------------------------------------------------------===//
147058b69754SDimitry Andric // MachineInstr latency interface.
147158b69754SDimitry Andric //===----------------------------------------------------------------------===//
147258b69754SDimitry Andric
getNumMicroOps(const InstrItineraryData * ItinData,const MachineInstr & MI) const147301095a5dSDimitry Andric unsigned TargetInstrInfo::getNumMicroOps(const InstrItineraryData *ItinData,
147401095a5dSDimitry Andric const MachineInstr &MI) const {
147558b69754SDimitry Andric if (!ItinData || ItinData->isEmpty())
147658b69754SDimitry Andric return 1;
147758b69754SDimitry Andric
147801095a5dSDimitry Andric unsigned Class = MI.getDesc().getSchedClass();
147958b69754SDimitry Andric int UOps = ItinData->Itineraries[Class].NumMicroOps;
148058b69754SDimitry Andric if (UOps >= 0)
148158b69754SDimitry Andric return UOps;
148258b69754SDimitry Andric
148358b69754SDimitry Andric // The # of u-ops is dynamically determined. The specific target should
148458b69754SDimitry Andric // override this function to return the right number.
148558b69754SDimitry Andric return 1;
148658b69754SDimitry Andric }
148758b69754SDimitry Andric
148858b69754SDimitry Andric /// Return the default expected latency for a def based on it's opcode.
defaultDefLatency(const MCSchedModel & SchedModel,const MachineInstr & DefMI) const148967c32a98SDimitry Andric unsigned TargetInstrInfo::defaultDefLatency(const MCSchedModel &SchedModel,
149001095a5dSDimitry Andric const MachineInstr &DefMI) const {
149101095a5dSDimitry Andric if (DefMI.isTransient())
1492522600a2SDimitry Andric return 0;
149301095a5dSDimitry Andric if (DefMI.mayLoad())
149467c32a98SDimitry Andric return SchedModel.LoadLatency;
149501095a5dSDimitry Andric if (isHighLatencyDef(DefMI.getOpcode()))
149667c32a98SDimitry Andric return SchedModel.HighLatency;
149758b69754SDimitry Andric return 1;
149858b69754SDimitry Andric }
149958b69754SDimitry Andric
// Default predication cost: free. Targets with a real cost override this.
unsigned TargetInstrInfo::getPredicationCost(const MachineInstr &) const {
  return 0;
}
1503f8af5cf6SDimitry Andric
getInstrLatency(const InstrItineraryData * ItinData,const MachineInstr & MI,unsigned * PredCost) const150401095a5dSDimitry Andric unsigned TargetInstrInfo::getInstrLatency(const InstrItineraryData *ItinData,
150501095a5dSDimitry Andric const MachineInstr &MI,
150658b69754SDimitry Andric unsigned *PredCost) const {
150758b69754SDimitry Andric // Default to one cycle for no itinerary. However, an "empty" itinerary may
150858b69754SDimitry Andric // still have a MinLatency property, which getStageLatency checks.
150958b69754SDimitry Andric if (!ItinData)
151001095a5dSDimitry Andric return MI.mayLoad() ? 2 : 1;
151158b69754SDimitry Andric
151201095a5dSDimitry Andric return ItinData->getStageLatency(MI.getDesc().getSchedClass());
151358b69754SDimitry Andric }
151458b69754SDimitry Andric
hasLowDefLatency(const TargetSchedModel & SchedModel,const MachineInstr & DefMI,unsigned DefIdx) const15153a0822f0SDimitry Andric bool TargetInstrInfo::hasLowDefLatency(const TargetSchedModel &SchedModel,
151601095a5dSDimitry Andric const MachineInstr &DefMI,
151758b69754SDimitry Andric unsigned DefIdx) const {
15183a0822f0SDimitry Andric const InstrItineraryData *ItinData = SchedModel.getInstrItineraries();
151958b69754SDimitry Andric if (!ItinData || ItinData->isEmpty())
152058b69754SDimitry Andric return false;
152158b69754SDimitry Andric
152201095a5dSDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
1523b1c73532SDimitry Andric std::optional<unsigned> DefCycle =
1524b1c73532SDimitry Andric ItinData->getOperandCycle(DefClass, DefIdx);
1525b1c73532SDimitry Andric return DefCycle && DefCycle <= 1U;
1526b1c73532SDimitry Andric }
1527b1c73532SDimitry Andric
isFunctionSafeToSplit(const MachineFunction & MF) const1528b1c73532SDimitry Andric bool TargetInstrInfo::isFunctionSafeToSplit(const MachineFunction &MF) const {
1529b1c73532SDimitry Andric // TODO: We don't split functions where a section attribute has been set
1530b1c73532SDimitry Andric // since the split part may not be placed in a contiguous region. It may also
1531b1c73532SDimitry Andric // be more beneficial to augment the linker to ensure contiguous layout of
1532b1c73532SDimitry Andric // split functions within the same section as specified by the attribute.
1533ac9a064cSDimitry Andric if (MF.getFunction().hasSection())
1534b1c73532SDimitry Andric return false;
1535b1c73532SDimitry Andric
1536b1c73532SDimitry Andric // We don't want to proceed further for cold functions
1537b1c73532SDimitry Andric // or functions of unknown hotness. Lukewarm functions have no prefix.
1538b1c73532SDimitry Andric std::optional<StringRef> SectionPrefix = MF.getFunction().getSectionPrefix();
1539b1c73532SDimitry Andric if (SectionPrefix &&
1540b1c73532SDimitry Andric (*SectionPrefix == "unlikely" || *SectionPrefix == "unknown")) {
1541b1c73532SDimitry Andric return false;
1542b1c73532SDimitry Andric }
1543b1c73532SDimitry Andric
1544b1c73532SDimitry Andric return true;
154558b69754SDimitry Andric }
154658b69754SDimitry Andric
/// Describe the value that \p MI produces in \p Reg as a (machine operand,
/// DIExpression) pair usable for call-site parameter debug info. Returns
/// std::nullopt when no safe description exists.
std::optional<ParamLoadedValue>
TargetInstrInfo::describeLoadedValue(const MachineInstr &MI,
                                     Register Reg) const {
  const MachineFunction *MF = MI.getMF();
  const TargetRegisterInfo *TRI = MF->getSubtarget().getRegisterInfo();
  // Start from the empty expression; offsets/derefs are prepended below.
  DIExpression *Expr = DIExpression::get(MF->getFunction().getContext(), {});
  int64_t Offset;
  bool OffsetIsScalable;

  // To simplify the sub-register handling, verify that we only need to
  // consider physical registers.
  assert(MF->getProperties().hasProperty(
      MachineFunctionProperties::Property::NoVRegs));

  if (auto DestSrc = isCopyInstr(MI)) {
    Register DestReg = DestSrc->Destination->getReg();

    // If the copy destination is the forwarding reg, describe the forwarding
    // reg using the copy source as the backup location. Example:
    //
    // x0 = MOV x7
    // call callee(x0) ; x0 described as x7
    if (Reg == DestReg)
      return ParamLoadedValue(*DestSrc->Source, Expr);

    // If the target's hook couldn't describe this copy, give up.
    return std::nullopt;
  } else if (auto RegImm = isAddImmediate(MI, Reg)) {
    // Reg = SrcReg + Imm: describe Reg as the source register offset by the
    // immediate.
    Register SrcReg = RegImm->Reg;
    Offset = RegImm->Imm;
    Expr = DIExpression::prepend(Expr, DIExpression::ApplyOffset, Offset);
    return ParamLoadedValue(MachineOperand::CreateReg(SrcReg, false), Expr);
  } else if (MI.hasOneMemOperand()) {
    // Only describe memory which provably does not escape the function. As
    // described in llvm.org/PR43343, escaped memory may be clobbered by the
    // callee (or by another thread).
    const auto &TII = MF->getSubtarget().getInstrInfo();
    const MachineFrameInfo &MFI = MF->getFrameInfo();
    const MachineMemOperand *MMO = MI.memoperands()[0];
    const PseudoSourceValue *PSV = MMO->getPseudoValue();

    // If the address points to "special" memory (e.g. a spill slot), it's
    // sufficient to check that it isn't aliased by any high-level IR value.
    if (!PSV || PSV->mayAlias(&MFI))
      return std::nullopt;

    const MachineOperand *BaseOp;
    if (!TII->getMemOperandWithOffset(MI, BaseOp, Offset, OffsetIsScalable,
                                      TRI))
      return std::nullopt;

    // FIXME: Scalable offsets are not yet handled in the offset code below.
    if (OffsetIsScalable)
      return std::nullopt;

    // TODO: Can currently only handle mem instructions with a single define.
    // An example from the x86 target:
    // ...
    // DIV64m $rsp, 1, $noreg, 24, $noreg, implicit-def dead $rax, implicit-def $rdx
    // ...
    //
    if (MI.getNumExplicitDefs() != 1)
      return std::nullopt;

    // TODO: In what way do we need to take Reg into consideration here?

    // Describe the value as deref_size(BaseOp + Offset); an unknown memory
    // size is encoded as ~0.
    SmallVector<uint64_t, 8> Ops;
    DIExpression::appendOffset(Ops, Offset);
    Ops.push_back(dwarf::DW_OP_deref_size);
    Ops.push_back(MMO->getSize().hasValue() ? MMO->getSize().getValue()
                                            : ~UINT64_C(0));
    Expr = DIExpression::prependOpcodes(Expr, Ops);
    return ParamLoadedValue(*BaseOp, Expr);
  }

  return std::nullopt;
}
16241d5ae102SDimitry Andric
1625b1c73532SDimitry Andric // Get the call frame size just before MI.
getCallFrameSizeAt(MachineInstr & MI) const1626b1c73532SDimitry Andric unsigned TargetInstrInfo::getCallFrameSizeAt(MachineInstr &MI) const {
1627b1c73532SDimitry Andric // Search backwards from MI for the most recent call frame instruction.
1628b1c73532SDimitry Andric MachineBasicBlock *MBB = MI.getParent();
1629b1c73532SDimitry Andric for (auto &AdjI : reverse(make_range(MBB->instr_begin(), MI.getIterator()))) {
1630b1c73532SDimitry Andric if (AdjI.getOpcode() == getCallFrameSetupOpcode())
1631b1c73532SDimitry Andric return getFrameTotalSize(AdjI);
1632b1c73532SDimitry Andric if (AdjI.getOpcode() == getCallFrameDestroyOpcode())
1633b1c73532SDimitry Andric return 0;
1634b1c73532SDimitry Andric }
1635b1c73532SDimitry Andric
1636b1c73532SDimitry Andric // If none was found, use the call frame size from the start of the basic
1637b1c73532SDimitry Andric // block.
1638b1c73532SDimitry Andric return MBB->getCallFrameSize();
1639b1c73532SDimitry Andric }
1640b1c73532SDimitry Andric
164158b69754SDimitry Andric /// Both DefMI and UseMI must be valid. By default, call directly to the
164258b69754SDimitry Andric /// itinerary. This may be overriden by the target.
getOperandLatency(const InstrItineraryData * ItinData,const MachineInstr & DefMI,unsigned DefIdx,const MachineInstr & UseMI,unsigned UseIdx) const1643b1c73532SDimitry Andric std::optional<unsigned> TargetInstrInfo::getOperandLatency(
1644b1c73532SDimitry Andric const InstrItineraryData *ItinData, const MachineInstr &DefMI,
1645b1c73532SDimitry Andric unsigned DefIdx, const MachineInstr &UseMI, unsigned UseIdx) const {
164601095a5dSDimitry Andric unsigned DefClass = DefMI.getDesc().getSchedClass();
164701095a5dSDimitry Andric unsigned UseClass = UseMI.getDesc().getSchedClass();
164858b69754SDimitry Andric return ItinData->getOperandLatency(DefClass, DefIdx, UseClass, UseIdx);
164958b69754SDimitry Andric }
165058b69754SDimitry Andric
getRegSequenceInputs(const MachineInstr & MI,unsigned DefIdx,SmallVectorImpl<RegSubRegPairAndIdx> & InputRegs) const165167c32a98SDimitry Andric bool TargetInstrInfo::getRegSequenceInputs(
165267c32a98SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
165367c32a98SDimitry Andric SmallVectorImpl<RegSubRegPairAndIdx> &InputRegs) const {
165467c32a98SDimitry Andric assert((MI.isRegSequence() ||
165567c32a98SDimitry Andric MI.isRegSequenceLike()) && "Instruction do not have the proper type");
165667c32a98SDimitry Andric
165767c32a98SDimitry Andric if (!MI.isRegSequence())
165867c32a98SDimitry Andric return getRegSequenceLikeInputs(MI, DefIdx, InputRegs);
165967c32a98SDimitry Andric
166067c32a98SDimitry Andric // We are looking at:
166167c32a98SDimitry Andric // Def = REG_SEQUENCE v0, sub0, v1, sub1, ...
166267c32a98SDimitry Andric assert(DefIdx == 0 && "REG_SEQUENCE only has one def");
166367c32a98SDimitry Andric for (unsigned OpIdx = 1, EndOpIdx = MI.getNumOperands(); OpIdx != EndOpIdx;
166467c32a98SDimitry Andric OpIdx += 2) {
166567c32a98SDimitry Andric const MachineOperand &MOReg = MI.getOperand(OpIdx);
1666eb11fae6SDimitry Andric if (MOReg.isUndef())
1667eb11fae6SDimitry Andric continue;
166867c32a98SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(OpIdx + 1);
166967c32a98SDimitry Andric assert(MOSubIdx.isImm() &&
167067c32a98SDimitry Andric "One of the subindex of the reg_sequence is not an immediate");
167167c32a98SDimitry Andric // Record Reg:SubReg, SubIdx.
167267c32a98SDimitry Andric InputRegs.push_back(RegSubRegPairAndIdx(MOReg.getReg(), MOReg.getSubReg(),
167367c32a98SDimitry Andric (unsigned)MOSubIdx.getImm()));
167467c32a98SDimitry Andric }
167567c32a98SDimitry Andric return true;
167667c32a98SDimitry Andric }
167767c32a98SDimitry Andric
getExtractSubregInputs(const MachineInstr & MI,unsigned DefIdx,RegSubRegPairAndIdx & InputReg) const167867c32a98SDimitry Andric bool TargetInstrInfo::getExtractSubregInputs(
167967c32a98SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
168067c32a98SDimitry Andric RegSubRegPairAndIdx &InputReg) const {
168167c32a98SDimitry Andric assert((MI.isExtractSubreg() ||
168267c32a98SDimitry Andric MI.isExtractSubregLike()) && "Instruction do not have the proper type");
168367c32a98SDimitry Andric
168467c32a98SDimitry Andric if (!MI.isExtractSubreg())
168567c32a98SDimitry Andric return getExtractSubregLikeInputs(MI, DefIdx, InputReg);
168667c32a98SDimitry Andric
168767c32a98SDimitry Andric // We are looking at:
168867c32a98SDimitry Andric // Def = EXTRACT_SUBREG v0.sub1, sub0.
168967c32a98SDimitry Andric assert(DefIdx == 0 && "EXTRACT_SUBREG only has one def");
169067c32a98SDimitry Andric const MachineOperand &MOReg = MI.getOperand(1);
1691eb11fae6SDimitry Andric if (MOReg.isUndef())
1692eb11fae6SDimitry Andric return false;
169367c32a98SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(2);
169467c32a98SDimitry Andric assert(MOSubIdx.isImm() &&
169567c32a98SDimitry Andric "The subindex of the extract_subreg is not an immediate");
169667c32a98SDimitry Andric
169767c32a98SDimitry Andric InputReg.Reg = MOReg.getReg();
169867c32a98SDimitry Andric InputReg.SubReg = MOReg.getSubReg();
169967c32a98SDimitry Andric InputReg.SubIdx = (unsigned)MOSubIdx.getImm();
170067c32a98SDimitry Andric return true;
170167c32a98SDimitry Andric }
170267c32a98SDimitry Andric
getInsertSubregInputs(const MachineInstr & MI,unsigned DefIdx,RegSubRegPair & BaseReg,RegSubRegPairAndIdx & InsertedReg) const170367c32a98SDimitry Andric bool TargetInstrInfo::getInsertSubregInputs(
170467c32a98SDimitry Andric const MachineInstr &MI, unsigned DefIdx,
170567c32a98SDimitry Andric RegSubRegPair &BaseReg, RegSubRegPairAndIdx &InsertedReg) const {
170667c32a98SDimitry Andric assert((MI.isInsertSubreg() ||
170767c32a98SDimitry Andric MI.isInsertSubregLike()) && "Instruction do not have the proper type");
170867c32a98SDimitry Andric
170967c32a98SDimitry Andric if (!MI.isInsertSubreg())
171067c32a98SDimitry Andric return getInsertSubregLikeInputs(MI, DefIdx, BaseReg, InsertedReg);
171167c32a98SDimitry Andric
171267c32a98SDimitry Andric // We are looking at:
171367c32a98SDimitry Andric // Def = INSERT_SEQUENCE v0, v1, sub0.
171467c32a98SDimitry Andric assert(DefIdx == 0 && "INSERT_SUBREG only has one def");
171567c32a98SDimitry Andric const MachineOperand &MOBaseReg = MI.getOperand(1);
171667c32a98SDimitry Andric const MachineOperand &MOInsertedReg = MI.getOperand(2);
1717eb11fae6SDimitry Andric if (MOInsertedReg.isUndef())
1718eb11fae6SDimitry Andric return false;
171967c32a98SDimitry Andric const MachineOperand &MOSubIdx = MI.getOperand(3);
172067c32a98SDimitry Andric assert(MOSubIdx.isImm() &&
172167c32a98SDimitry Andric "One of the subindex of the reg_sequence is not an immediate");
172267c32a98SDimitry Andric BaseReg.Reg = MOBaseReg.getReg();
172367c32a98SDimitry Andric BaseReg.SubReg = MOBaseReg.getSubReg();
172467c32a98SDimitry Andric
172567c32a98SDimitry Andric InsertedReg.Reg = MOInsertedReg.getReg();
172667c32a98SDimitry Andric InsertedReg.SubReg = MOInsertedReg.getSubReg();
172767c32a98SDimitry Andric InsertedReg.SubIdx = (unsigned)MOSubIdx.getImm();
172867c32a98SDimitry Andric return true;
172967c32a98SDimitry Andric }
17301d5ae102SDimitry Andric
// Returns a MIRPrinter comment for this machine operand.
std::string TargetInstrInfo::createMIROperandComment(
    const MachineInstr &MI, const MachineOperand &Op, unsigned OpIdx,
    const TargetRegisterInfo *TRI) const {

  // Only inline-asm operands get a comment here.
  if (!MI.isInlineAsm())
    return "";

  std::string Flags;
  raw_string_ostream OS(Flags);

  if (OpIdx == InlineAsm::MIOp_ExtraInfo) {
    // Print HasSideEffects, MayLoad, MayStore, IsAlignStack
    unsigned ExtraInfo = Op.getImm();
    bool First = true;
    for (StringRef Info : InlineAsm::getExtraInfoNames(ExtraInfo)) {
      if (!First)
        OS << " ";
      First = false;
      OS << Info;
    }

    return Flags;
  }

  // Only the flag operand of each inline-asm operand group is annotated;
  // any other operand index yields an empty comment.
  int FlagIdx = MI.findInlineAsmFlagIdx(OpIdx);
  if (FlagIdx < 0 || (unsigned)FlagIdx != OpIdx)
    return "";

  assert(Op.isImm() && "Expected flag operand to be an immediate");
  // Pretty print the inline asm operand descriptor.
  unsigned Flag = Op.getImm();
  const InlineAsm::Flag F(Flag);
  OS << F.getKindName();

  // Register-class-constrained operands also print the constraining class
  // (by name when TRI is available, otherwise by numeric id).
  unsigned RCID;
  if (!F.isImmKind() && !F.isMemKind() && F.hasRegClassConstraint(RCID)) {
    if (TRI) {
      OS << ':' << TRI->getRegClassName(TRI->getRegClass(RCID));
    } else
      OS << ":RC" << RCID;
  }

  // Memory operands print their constraint code.
  if (F.isMemKind()) {
    InlineAsm::ConstraintCode MCID = F.getMemoryConstraintID();
    OS << ":" << InlineAsm::getMemConstraintName(MCID);
  }

  // Tied uses print the def operand they are tied to.
  unsigned TiedTo;
  if (F.isUseOperandTiedToDef(TiedTo))
    OS << " tiedto:$" << TiedTo;

  // Register operands flagged as foldable print a marker.
  if ((F.isRegDefKind() || F.isRegDefEarlyClobberKind() || F.isRegUseKind()) &&
      F.getRegMayBeFolded())
    OS << " foldable";

  return Flags;
}
1789cfca06d7SDimitry Andric
// Out-of-line defaulted destructor for the pipeliner loop-info interface.
TargetInstrInfo::PipelinerLoopInfo::~PipelinerLoopInfo() = default;
1791f65dcba8SDimitry Andric
mergeOutliningCandidateAttributes(Function & F,std::vector<outliner::Candidate> & Candidates) const1792f65dcba8SDimitry Andric void TargetInstrInfo::mergeOutliningCandidateAttributes(
1793f65dcba8SDimitry Andric Function &F, std::vector<outliner::Candidate> &Candidates) const {
1794f65dcba8SDimitry Andric // Include target features from an arbitrary candidate for the outlined
1795f65dcba8SDimitry Andric // function. This makes sure the outlined function knows what kinds of
1796f65dcba8SDimitry Andric // instructions are going into it. This is fine, since all parent functions
1797f65dcba8SDimitry Andric // must necessarily support the instructions that are in the outlined region.
1798f65dcba8SDimitry Andric outliner::Candidate &FirstCand = Candidates.front();
1799f65dcba8SDimitry Andric const Function &ParentFn = FirstCand.getMF()->getFunction();
1800f65dcba8SDimitry Andric if (ParentFn.hasFnAttribute("target-features"))
1801f65dcba8SDimitry Andric F.addFnAttr(ParentFn.getFnAttribute("target-features"));
1802e3b55780SDimitry Andric if (ParentFn.hasFnAttribute("target-cpu"))
1803e3b55780SDimitry Andric F.addFnAttr(ParentFn.getFnAttribute("target-cpu"));
1804f65dcba8SDimitry Andric
1805f65dcba8SDimitry Andric // Set nounwind, so we don't generate eh_frame.
1806f65dcba8SDimitry Andric if (llvm::all_of(Candidates, [](const outliner::Candidate &C) {
1807f65dcba8SDimitry Andric return C.getMF()->getFunction().hasFnAttribute(Attribute::NoUnwind);
1808f65dcba8SDimitry Andric }))
1809f65dcba8SDimitry Andric F.addFnAttr(Attribute::NoUnwind);
1810f65dcba8SDimitry Andric }
181177fc4c14SDimitry Andric
/// Classify the instruction at MIT for the MachineOutliner: Illegal
/// (terminates a candidate), Invisible (ignored by the analysis), or — via
/// the target hook — Legal/LegalTerminator. Target-independent filtering
/// happens here; everything not rejected is delegated to
/// getOutliningTypeImpl.
outliner::InstrType TargetInstrInfo::getOutliningType(
    MachineBasicBlock::iterator &MIT, unsigned Flags) const {
  MachineInstr &MI = *MIT;

  // NOTE: MI.isMetaInstruction() will match CFI_INSTRUCTION, but some targets
  // have support for outlining those. Special-case that here.
  if (MI.isCFIInstruction())
    // Just go right to the target implementation.
    return getOutliningTypeImpl(MIT, Flags);

  // Be conservative about inline assembly.
  if (MI.isInlineAsm())
    return outliner::InstrType::Illegal;

  // Labels generally can't safely be outlined.
  if (MI.isLabel())
    return outliner::InstrType::Illegal;

  // Don't let debug instructions impact analysis.
  if (MI.isDebugInstr())
    return outliner::InstrType::Invisible;

  // Some other special cases.
  switch (MI.getOpcode()) {
  case TargetOpcode::IMPLICIT_DEF:
  case TargetOpcode::KILL:
  case TargetOpcode::LIFETIME_START:
  case TargetOpcode::LIFETIME_END:
    return outliner::InstrType::Invisible;
  default:
    break;
  }

  // Is this a terminator for a basic block?
  if (MI.isTerminator()) {
    // If this is a branch to another block, we can't outline it.
    if (!MI.getParent()->succ_empty())
      return outliner::InstrType::Illegal;

    // Don't outline if the branch is not unconditional.
    if (isPredicated(MI))
      return outliner::InstrType::Illegal;
  }

  // Make sure none of the operands of this instruction do anything that
  // might break if they're moved outside their current function.
  // This includes MachineBasicBlock references, BlockAddressses,
  // Constant pool indices and jump table indices.
  //
  // A quick note on MO_TargetIndex:
  // This doesn't seem to be used in any of the architectures that the
  // MachineOutliner supports, but it was still filtered out in all of them.
  // There was one exception (RISC-V), but MO_TargetIndex also isn't used there.
  // As such, this check is removed both here and in the target-specific
  // implementations. Instead, we assert to make sure this doesn't
  // catch anyone off-guard somewhere down the line.
  for (const MachineOperand &MOP : MI.operands()) {
    // If you hit this assertion, please remove it and adjust
    // `getOutliningTypeImpl` for your target appropriately if necessary.
    // Adding the assertion back to other supported architectures
    // would be nice too :)
    assert(!MOP.isTargetIndex() && "This isn't used quite yet!");

    // CFI instructions should already have been filtered out at this point.
    assert(!MOP.isCFIIndex() && "CFI instructions handled elsewhere!");

    // PrologEpilogInserter should've already run at this point.
    assert(!MOP.isFI() && "FrameIndex instructions should be gone by now!");

    if (MOP.isMBB() || MOP.isBlockAddress() || MOP.isCPI() || MOP.isJTI())
      return outliner::InstrType::Illegal;
  }

  // If we don't know, delegate to the target-specific hook.
  return getOutliningTypeImpl(MIT, Flags);
}
18887fa27ce4SDimitry Andric
isMBBSafeToOutlineFrom(MachineBasicBlock & MBB,unsigned & Flags) const188977fc4c14SDimitry Andric bool TargetInstrInfo::isMBBSafeToOutlineFrom(MachineBasicBlock &MBB,
189077fc4c14SDimitry Andric unsigned &Flags) const {
189177fc4c14SDimitry Andric // Some instrumentations create special TargetOpcode at the start which
189277fc4c14SDimitry Andric // expands to special code sequences which must be present.
189377fc4c14SDimitry Andric auto First = MBB.getFirstNonDebugInstr();
18947fa27ce4SDimitry Andric if (First == MBB.end())
18957fa27ce4SDimitry Andric return true;
18967fa27ce4SDimitry Andric
18977fa27ce4SDimitry Andric if (First->getOpcode() == TargetOpcode::FENTRY_CALL ||
18987fa27ce4SDimitry Andric First->getOpcode() == TargetOpcode::PATCHABLE_FUNCTION_ENTER)
189977fc4c14SDimitry Andric return false;
190077fc4c14SDimitry Andric
19017fa27ce4SDimitry Andric // Some instrumentations create special pseudo-instructions at or just before
19027fa27ce4SDimitry Andric // the end that must be present.
19037fa27ce4SDimitry Andric auto Last = MBB.getLastNonDebugInstr();
19047fa27ce4SDimitry Andric if (Last->getOpcode() == TargetOpcode::PATCHABLE_RET ||
19057fa27ce4SDimitry Andric Last->getOpcode() == TargetOpcode::PATCHABLE_TAIL_CALL)
19067fa27ce4SDimitry Andric return false;
19077fa27ce4SDimitry Andric
19087fa27ce4SDimitry Andric if (Last != First && Last->isReturn()) {
19097fa27ce4SDimitry Andric --Last;
19107fa27ce4SDimitry Andric if (Last->getOpcode() == TargetOpcode::PATCHABLE_FUNCTION_EXIT ||
19117fa27ce4SDimitry Andric Last->getOpcode() == TargetOpcode::PATCHABLE_TAIL_CALL)
19127fa27ce4SDimitry Andric return false;
19137fa27ce4SDimitry Andric }
191477fc4c14SDimitry Andric return true;
191577fc4c14SDimitry Andric }
1916