@@ -50,7 +50,7 @@ static bool isAligned(const Value *Base, const APInt &Offset, Align Alignment,
 /// Test if V is always a pointer to allocated and suitably aligned memory for
 /// a simple load or store.
 static bool isDereferenceableAndAlignedPointer(
-    const Value *V, unsigned Align, const APInt &Size, const DataLayout &DL,
+    const Value *V, Align Alignment, const APInt &Size, const DataLayout &DL,
     const Instruction *CtxI, const DominatorTree *DT,
     SmallPtrSetImpl<const Value *> &Visited) {
   // Already visited?  Bail out, we've likely hit unreachable code.
@@ -62,8 +62,8 @@ static bool isDereferenceableAndAlignedPointer(
 
   // bitcast instructions are no-ops as far as dereferenceability is concerned.
   if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V))
-    return isDereferenceableAndAlignedPointer(BC->getOperand(0), Align, Size,
-                                              DL, CtxI, DT, Visited);
+    return isDereferenceableAndAlignedPointer(BC->getOperand(0), Alignment,
+                                              Size, DL, CtxI, DT, Visited);
 
   bool CheckForNonNull = false;
   APInt KnownDerefBytes(Size.getBitWidth(),
@@ -76,7 +76,7 @@ static bool isDereferenceableAndAlignedPointer(
       Type *Ty = V->getType();
       assert(Ty->isSized() && "must be sized");
       APInt Offset(DL.getTypeStoreSizeInBits(Ty), 0);
-      return isAligned(V, Offset, llvm::Align(Align), DL);
+      return isAligned(V, Offset, Alignment, DL);
     }
 
   // For GEPs, determine if the indexing lands within the allocated object.
@@ -85,7 +85,8 @@ static bool isDereferenceableAndAlignedPointer(
 
     APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
     if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
-        !Offset.urem(APInt(Offset.getBitWidth(), Align)).isMinValue())
+        !Offset.urem(APInt(Offset.getBitWidth(), Alignment.value()))
+             .isMinValue())
       return false;
 
     // If the base pointer is dereferenceable for Offset+Size bytes, then the
@@ -97,72 +98,69 @@ static bool isDereferenceableAndAlignedPointer(
     // Offset and Size may have different bit widths if we have visited an
     // addrspacecast, so we can't do arithmetic directly on the APInt values.
     return isDereferenceableAndAlignedPointer(
-        Base, Align, Offset + Size.sextOrTrunc(Offset.getBitWidth()),
-        DL, CtxI, DT, Visited);
+        Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,
+        CtxI, DT, Visited);
   }
 
   // For gc.relocate, look through relocations
   if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
     return isDereferenceableAndAlignedPointer(
-        RelocateInst->getDerivedPtr(), Align, Size, DL, CtxI, DT, Visited);
+        RelocateInst->getDerivedPtr(), Alignment, Size, DL, CtxI, DT, Visited);
 
   if (const AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(V))
-    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, Size,
-                                              DL, CtxI, DT, Visited);
+    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
+                                              Size, DL, CtxI, DT, Visited);
 
   if (const auto *Call = dyn_cast<CallBase>(V))
     if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))
-      return isDereferenceableAndAlignedPointer(RP, Align, Size, DL, CtxI, DT,
-                                                Visited);
+      return isDereferenceableAndAlignedPointer(RP, Alignment, Size, DL, CtxI,
+                                                DT, Visited);
 
   // If we don't know, assume the worst.
   return false;
 }
 
-bool llvm::isDereferenceableAndAlignedPointer(const Value *V, unsigned Align,
+bool llvm::isDereferenceableAndAlignedPointer(const Value *V, Align Alignment,
                                               const APInt &Size,
                                               const DataLayout &DL,
                                               const Instruction *CtxI,
                                               const DominatorTree *DT) {
-  assert(Align != 0 && "expected explicitly set alignment");
   // Note: At the moment, Size can be zero.  This ends up being interpreted as
   // a query of whether [Base, V] is dereferenceable and V is aligned (since
   // that's what the implementation happened to do).  It's unclear if this is
   // the desired semantic, but at least SelectionDAG does exercise this case.
 
   SmallPtrSet<const Value *, 32> Visited;
-  return ::isDereferenceableAndAlignedPointer(V, Align, Size, DL, CtxI, DT,
+  return ::isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, DT,
                                               Visited);
 }
 
 bool llvm::isDereferenceableAndAlignedPointer(const Value *V, Type *Ty,
-                                              unsigned Align,
+                                              MaybeAlign MA,
                                               const DataLayout &DL,
                                               const Instruction *CtxI,
                                               const DominatorTree *DT) {
+  if (!Ty->isSized())
+    return false;
+
   // When dereferenceability information is provided by a dereferenceable
   // attribute, we know exactly how many bytes are dereferenceable. If we can
   // determine the exact offset to the attributed variable, we can use that
   // information here.
 
   // Require ABI alignment for loads without alignment specification
-  if (Align == 0)
-    Align = DL.getABITypeAlignment(Ty);
-
-  if (!Ty->isSized())
-    return false;
-
+  const Align Alignment = DL.getValueOrABITypeAlignment(MA, Ty);
   APInt AccessSize(DL.getIndexTypeSizeInBits(V->getType()),
                    DL.getTypeStoreSize(Ty));
-  return isDereferenceableAndAlignedPointer(V, Align, AccessSize,
-                                            DL, CtxI, DT);
+  return isDereferenceableAndAlignedPointer(V, Alignment, AccessSize, DL, CtxI,
+                                            DT);
 }
 
 bool llvm::isDereferenceablePointer(const Value *V, Type *Ty,
                                     const DataLayout &DL,
                                     const Instruction *CtxI,
                                     const DominatorTree *DT) {
-  return isDereferenceableAndAlignedPointer(V, Ty, 1, DL, CtxI, DT);
+  return isDereferenceableAndAlignedPointer(V, Ty, Align::None(), DL, CtxI, DT);
 }
 
 /// Test if A and B will obviously have the same value.
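The recurring refactoring pattern in these hunks, shown as a minimal before/after sketch (assuming LI is a LoadInst * and DL is the enclosing module's DataLayout; the local variable names are illustrative):

    // Old convention: an unsigned alignment, where 0 means "use the ABI
    // alignment of the accessed type".
    unsigned OldAlign = LI->getAlignment();
    if (OldAlign == 0)
      OldAlign = DL.getABITypeAlignment(LI->getType());

    // New convention: MaybeAlign models "unspecified" explicitly, and the ABI
    // fallback collapses into a single DataLayout helper call.
    const Align NewAlign = DL.getValueOrABITypeAlignment(
        MaybeAlign(LI->getAlignment()), LI->getType());

Because Align is non-zero and a power of two by construction, the old assertions such as assert(Align != 0 && "expected explicitly set alignment") and assert(isPowerOf2_32(Align)) become redundant and are dropped.
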
@@ -204,17 +202,16 @@ bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
 
   APInt EltSize(DL.getIndexTypeSizeInBits(Ptr->getType()),
                 DL.getTypeStoreSize(LI->getType()));
-  unsigned Align = LI->getAlignment();
-  if (Align == 0)
-    Align = DL.getABITypeAlignment(LI->getType());
+  const Align Alignment = DL.getValueOrABITypeAlignment(
+      MaybeAlign(LI->getAlignment()), LI->getType());
 
   Instruction *HeaderFirstNonPHI = L->getHeader()->getFirstNonPHI();
 
   // If given a uniform (i.e. non-varying) address, see if we can prove the
   // access is safe within the loop w/o needing predication.
   if (L->isLoopInvariant(Ptr))
-    return isDereferenceableAndAlignedPointer(Ptr, Align, EltSize, DL,
-                                              HeaderFirstNonPHI, &DT);
+    return isDereferenceableAndAlignedPointer(Ptr, Alignment, EltSize, DL,
+                                              HeaderFirstNonPHI, &DT);
 
   // Otherwise, check to see if we have a repeating access pattern where we can
   // prove that all accesses are well aligned and dereferenceable.
@@ -245,10 +242,10 @@ bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
   // For the moment, restrict ourselves to the case where the access size is a
   // multiple of the requested alignment and the base is aligned.
   // TODO: generalize if a case found which warrants
-  if (EltSize.urem(Align) != 0)
+  if (EltSize.urem(Alignment.value()) != 0)
     return false;
-  return isDereferenceableAndAlignedPointer(Base, Align, AccessSize,
-                                            DL, HeaderFirstNonPHI, &DT);
+  return isDereferenceableAndAlignedPointer(Base, Alignment, AccessSize, DL,
+                                            HeaderFirstNonPHI, &DT);
 }
 
 /// Check if executing a load of this pointer value cannot trap.
@@ -262,18 +259,17 @@ bool llvm::isDereferenceableAndAlignedInLoop(LoadInst *LI, Loop *L,
 ///
 /// This uses the pointee type to determine how many bytes need to be safe to
 /// load from the pointer.
-bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align, APInt &Size,
+bool llvm::isSafeToLoadUnconditionally(Value *V, MaybeAlign MA, APInt &Size,
                                        const DataLayout &DL,
                                        Instruction *ScanFrom,
                                        const DominatorTree *DT) {
   // Zero alignment means that the load has the ABI alignment for the target
-  if (Align == 0)
-    Align = DL.getABITypeAlignment(V->getType()->getPointerElementType());
-  assert(isPowerOf2_32(Align));
+  const Align Alignment =
+      DL.getValueOrABITypeAlignment(MA, V->getType()->getPointerElementType());
 
   // If DT is not specified we can't make context-sensitive query
   const Instruction* CtxI = DT ? ScanFrom : nullptr;
-  if (isDereferenceableAndAlignedPointer(V, Align, Size, DL, CtxI, DT))
+  if (isDereferenceableAndAlignedPointer(V, Alignment, Size, DL, CtxI, DT))
     return true;
 
   if (!ScanFrom)
@@ -305,28 +301,29 @@ bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align, APInt &Size,
      return false;
 
    Value *AccessedPtr;
-   unsigned AccessedAlign;
+   MaybeAlign MaybeAccessedAlign;
    if (LoadInst *LI = dyn_cast<LoadInst>(BBI)) {
      // Ignore volatile loads. The execution of a volatile load cannot
      // be used to prove an address is backed by regular memory; it can,
      // for example, point to an MMIO register.
      if (LI->isVolatile())
        continue;
      AccessedPtr = LI->getPointerOperand();
-     AccessedAlign = LI->getAlignment();
+     MaybeAccessedAlign = MaybeAlign(LI->getAlignment());
    } else if (StoreInst *SI = dyn_cast<StoreInst>(BBI)) {
      // Ignore volatile stores (see comment for loads).
      if (SI->isVolatile())
        continue;
      AccessedPtr = SI->getPointerOperand();
-     AccessedAlign = SI->getAlignment();
+     MaybeAccessedAlign = MaybeAlign(SI->getAlignment());
    } else
      continue;
 
    Type *AccessedTy = AccessedPtr->getType()->getPointerElementType();
-   if (AccessedAlign == 0)
-     AccessedAlign = DL.getABITypeAlignment(AccessedTy);
-   if (AccessedAlign < Align)
+
+   const Align AccessedAlign =
+       DL.getValueOrABITypeAlignment(MaybeAccessedAlign, AccessedTy);
+   if (AccessedAlign < Alignment)
      continue;
 
    // Handle trivial cases.
@@ -341,12 +338,12 @@ bool llvm::isSafeToLoadUnconditionally(Value *V, unsigned Align, APInt &Size,
   return false;
 }
 
-bool llvm::isSafeToLoadUnconditionally(Value *V, Type *Ty, unsigned Align,
+bool llvm::isSafeToLoadUnconditionally(Value *V, Type *Ty, MaybeAlign Alignment,
                                        const DataLayout &DL,
                                        Instruction *ScanFrom,
                                        const DominatorTree *DT) {
   APInt Size(DL.getIndexTypeSizeInBits(V->getType()), DL.getTypeStoreSize(Ty));
-  return isSafeToLoadUnconditionally(V, Align, Size, DL, ScanFrom, DT);
+  return isSafeToLoadUnconditionally(V, Alignment, Size, DL, ScanFrom, DT);
 }
 
 /// DefMaxInstsToScan - the default number of maximum instructions
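
A caller-side sketch of the updated isSafeToLoadUnconditionally overload (assuming LI is a LoadInst *, DL is the module's DataLayout, and DT is a const DominatorTree *; the surrounding setup is illustrative, not part of this file):

    // MaybeAlign(0) is treated as "unspecified", so the helper falls back to
    // the ABI alignment of the loaded type, matching the old Align == 0
    // behavior at the call site.
    if (isSafeToLoadUnconditionally(LI->getPointerOperand(), LI->getType(),
                                    MaybeAlign(LI->getAlignment()), DL,
                                    /*ScanFrom=*/LI, DT)) {
      // The load is known not to trap, so it is safe to speculate it, e.g.
      // hoist it out of a conditional block.
    }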