reference, declaration → definition
definition → references, declarations, derived classes, virtual overrides
reference to multiple definitions → definitions
unreferenced

References

lib/CodeGen/PeepholeOptimizer.cpp
 1643       LocalMIs.insert(MI);
 1646       if (MI->isDebugInstr())
 1649       if (MI->isPosition())
 1652       if (IsLoopHeader && MI->isPHI()) {
 1653         if (optimizeRecurrence(*MI)) {
 1659       if (!MI->isCopy()) {
 1660         for (const MachineOperand &MO : MI->operands()) {
 1670                            << "NAPhysCopy: invalidating because of " << *MI);
 1680                            << "NAPhysCopy: invalidating because of " << *MI);
 1688       if (MI->isImplicitDef() || MI->isKill())
 1688       if (MI->isImplicitDef() || MI->isKill())
 1691       if (MI->isInlineAsm() || MI->hasUnmodeledSideEffects()) {
 1691       if (MI->isInlineAsm() || MI->hasUnmodeledSideEffects()) {
 1697                           << *MI);
 1701       if ((isUncoalescableCopy(*MI) &&
 1702            optimizeUncoalescableCopy(*MI, LocalMIs)) ||
 1703           (MI->isCompare() && optimizeCmpInstr(*MI)) ||
 1703           (MI->isCompare() && optimizeCmpInstr(*MI)) ||
 1704           (MI->isSelect() && optimizeSelect(*MI, LocalMIs))) {
 1704           (MI->isSelect() && optimizeSelect(*MI, LocalMIs))) {
 1706         LocalMIs.erase(MI);
 1711       if (MI->isConditionalBranch() && optimizeCondBranch(*MI)) {
 1711       if (MI->isConditionalBranch() && optimizeCondBranch(*MI)) {
 1716       if (isCoalescableCopy(*MI) && optimizeCoalescableCopy(*MI)) {
 1716       if (isCoalescableCopy(*MI) && optimizeCoalescableCopy(*MI)) {
 1722       if (MI->isCopy() &&
 1723           (foldRedundantCopy(*MI, CopySrcRegs, CopySrcMIs) ||
 1724            foldRedundantNAPhysCopy(*MI, NAPhysToVirtMIs))) {
 1725         LocalMIs.erase(MI);
 1726         MI->eraseFromParent();
 1731       if (isMoveImmediate(*MI, ImmDefRegs, ImmDefMIs)) {
 1734         Changed |= optimizeExtInstr(*MI, MBB, LocalMIs);
 1738         MII = MI;
 1741           Changed |= foldImmediate(*MI, ImmDefRegs, ImmDefMIs);
 1747       if (!isLoadFoldable(*MI, FoldAsLoadDefCandidates) &&
 1755         const MCInstrDesc &MIDesc = MI->getDesc();
 1756         for (unsigned i = MIDesc.getNumDefs(); i != MI->getNumOperands();
 1758           const MachineOperand &MOp = MI->getOperand(i);
 1770                     TII->optimizeLoadInstr(*MI, MRI, FoldAsLoadDefReg, DefMI)) {
 1773               LLVM_DEBUG(dbgs() << "Replacing: " << *MI);
 1775               LocalMIs.erase(MI);
 1778               if (MI->isCall())
 1779                 MI->getMF()->moveCallSiteInfo(MI, FoldMI);
 1779                 MI->getMF()->moveCallSiteInfo(MI, FoldMI);
 1780               MI->eraseFromParent();
 1788               MI = FoldMI;
 1797       if (MI->isLoadFoldBarrier()) {
 1798         LLVM_DEBUG(dbgs() << "Encountered load fold barrier on " << *MI);