@@ -4859,10 +4859,9 @@ static constexpr int MaxMemoryUsesToScan = 20;
 
 /// Recursively walk all the uses of I until we find a memory use.
 /// If we find an obviously non-foldable instruction, return true.
-/// Add the ultimately found memory instructions to MemoryUses.
+/// Add accessed addresses and types to MemoryUses.
 static bool FindAllMemoryUses(
-    Instruction *I,
-    SmallVectorImpl<std::pair<Instruction *, unsigned>> &MemoryUses,
+    Instruction *I, SmallVectorImpl<std::pair<Value *, Type *>> &MemoryUses,
     SmallPtrSetImpl<Instruction *> &ConsideredInsts, const TargetLowering &TLI,
     const TargetRegisterInfo &TRI, bool OptSize, ProfileSummaryInfo *PSI,
     BlockFrequencyInfo *BFI, int SeenInsts = 0) {
@@ -4883,31 +4882,28 @@ static bool FindAllMemoryUses(
 
   Instruction *UserI = cast<Instruction>(U.getUser());
   if (LoadInst *LI = dyn_cast<LoadInst>(UserI)) {
-    MemoryUses.push_back(std::make_pair(LI, U.getOperandNo()));
+    MemoryUses.push_back({U.get(), LI->getType()});
     continue;
   }
 
   if (StoreInst *SI = dyn_cast<StoreInst>(UserI)) {
-    unsigned opNo = U.getOperandNo();
-    if (opNo != StoreInst::getPointerOperandIndex())
+    if (U.getOperandNo() != StoreInst::getPointerOperandIndex())
       return true; // Storing addr, not into addr.
-    MemoryUses.push_back(std::make_pair(SI, opNo));
+    MemoryUses.push_back({U.get(), SI->getValueOperand()->getType()});
     continue;
   }
 
   if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(UserI)) {
-    unsigned opNo = U.getOperandNo();
-    if (opNo != AtomicRMWInst::getPointerOperandIndex())
+    if (U.getOperandNo() != AtomicRMWInst::getPointerOperandIndex())
       return true; // Storing addr, not into addr.
-    MemoryUses.push_back(std::make_pair(RMW, opNo));
+    MemoryUses.push_back({U.get(), RMW->getValOperand()->getType()});
     continue;
   }
 
   if (AtomicCmpXchgInst *CmpX = dyn_cast<AtomicCmpXchgInst>(UserI)) {
-    unsigned opNo = U.getOperandNo();
-    if (opNo != AtomicCmpXchgInst::getPointerOperandIndex())
+    if (U.getOperandNo() != AtomicCmpXchgInst::getPointerOperandIndex())
       return true; // Storing addr, not into addr.
-    MemoryUses.push_back(std::make_pair(CmpX, opNo));
+    MemoryUses.push_back({U.get(), CmpX->getCompareOperand()->getType()});
     continue;
   }
 
49134909
@@ -5017,7 +5013,7 @@ isProfitableToFoldIntoAddressingMode(Instruction *I, ExtAddrMode &AMBefore,
   // we can remove the addressing mode and effectively trade one live register
   // for another (at worst.) In this context, folding an addressing mode into
   // the use is just a particularly nice way of sinking it.
-  SmallVector<std::pair<Instruction*, unsigned>, 16> MemoryUses;
+  SmallVector<std::pair<Value *, Type *>, 16> MemoryUses;
   SmallPtrSet<Instruction*, 16> ConsideredInsts;
   if (FindAllMemoryUses(I, MemoryUses, ConsideredInsts, TLI, TRI, OptSize,
                         PSI, BFI))
@@ -5033,18 +5029,10 @@ isProfitableToFoldIntoAddressingMode(Instruction *I, ExtAddrMode &AMBefore,
   // growth since most architectures have some reasonable small and fast way to
   // compute an effective address. (i.e LEA on x86)
   SmallVector<Instruction*, 32> MatchedAddrModeInsts;
-  for (unsigned i = 0, e = MemoryUses.size(); i != e; ++i) {
-    Instruction *User = MemoryUses[i].first;
-    unsigned OpNo = MemoryUses[i].second;
-
-    // Get the access type of this use. If the use isn't a pointer, we don't
-    // know what it accesses.
-    Value *Address = User->getOperand(OpNo);
-    PointerType *AddrTy = dyn_cast<PointerType>(Address->getType());
-    if (!AddrTy)
-      return false;
-    Type *AddressAccessTy = AddrTy->getElementType();
-    unsigned AS = AddrTy->getAddressSpace();
+  for (const std::pair<Value *, Type *> &Pair : MemoryUses) {
+    Value *Address = Pair.first;
+    Type *AddressAccessTy = Pair.second;
+    unsigned AS = Address->getType()->getPointerAddressSpace();
 
   // Do a match against the root of this address, ignoring profitability. This
   // will tell us if the addressing mode for the memory operation will
0 commit comments