Analysis: Reorder code in isDereferenceableAndAlignedPointer
GEPs should be the most common and basic case, so try that first.
parent ff2b60bbcb
commit 4f2f7e84ff
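At a glance, the reorder only changes which case the function tries first; no case is added or removed. The toy program below is not LLVM source: a plain enum stands in for llvm::Value and the dyn_cast<> chain, and the helper name firstCaseTried is invented for illustration. It simply encodes the post-commit dispatch order, with the GEP case promoted to the front and the assume-based fallback last.

// Toy model of the post-commit check order, not LLVM code.
#include <cstdio>

enum class PtrKind { GEP, BitCast, Select, KnownDerefBytes, Call,
                     GCRelocate, AddrSpaceCast, Other };

// Returns which rule fires first for a value of the given kind, mirroring the
// order the rewritten isDereferenceableAndAlignedPointer now tries its cases.
const char *firstCaseTried(PtrKind K) {
  if (K == PtrKind::GEP)             return "GEP: recurse into the base pointer";
  if (K == PtrKind::BitCast)         return "bitcast: look through the cast";
  if (K == PtrKind::Select)          return "select: recurse into both arms";
  if (K == PtrKind::KnownDerefBytes) return "dereferenceable-bytes attributes";
  if (K == PtrKind::Call)            return "call: returned argument / allocation";
  if (K == PtrKind::GCRelocate)      return "gc.relocate: look through relocation";
  if (K == PtrKind::AddrSpaceCast)   return "addrspacecast: look through the cast";
  return "assume-based knowledge, else assume the worst";
}

int main() {
  std::printf("%s\n", firstCaseTried(PtrKind::GEP)); // the now-first, common case
  return 0;
}

Each case either answers the query directly or strips one wrapper (GEP, bitcast, select arm, gc.relocate, addrspacecast) and recurses, so putting the most common wrapper, the GEP, first avoids the failed dyn_casts on every level of the recursion.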
@@ -54,63 +54,6 @@ static bool isDereferenceableAndAlignedPointer(
   // Note that it is not safe to speculate into a malloc'd region because
   // malloc may return null.
 
-  // Recurse into both hands of select.
-  if (const SelectInst *Sel = dyn_cast<SelectInst>(V)) {
-    return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
-                                              Size, DL, CtxI, AC, DT, TLI,
-                                              Visited, MaxDepth) &&
-           isDereferenceableAndAlignedPointer(Sel->getFalseValue(), Alignment,
-                                              Size, DL, CtxI, AC, DT, TLI,
-                                              Visited, MaxDepth);
-  }
-
-  // bitcast instructions are no-ops as far as dereferenceability is concerned.
-  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
-    if (BC->getSrcTy()->isPointerTy())
-      return isDereferenceableAndAlignedPointer(BC->getOperand(0), Alignment,
-                                                Size, DL, CtxI, AC, DT, TLI,
-                                                Visited, MaxDepth);
-  }
-
-  bool CheckForNonNull, CheckForFreed;
-  APInt KnownDerefBytes(Size.getBitWidth(),
-                        V->getPointerDereferenceableBytes(DL, CheckForNonNull,
-                                                          CheckForFreed));
-  if (KnownDerefBytes.getBoolValue() && KnownDerefBytes.uge(Size) &&
-      !CheckForFreed)
-    if (!CheckForNonNull || isKnownNonZero(V, DL, 0, AC, CtxI, DT)) {
-      // As we recursed through GEPs to get here, we've incrementally checked
-      // that each step advanced by a multiple of the alignment. If our base is
-      // properly aligned, then the original offset accessed must also be.
-      APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);
-      return isAligned(V, Offset, Alignment, DL);
-    }
-
-  if (CtxI) {
-    /// Look through assumes to see if both dereferencability and alignment can
-    /// be provent by an assume
-    RetainedKnowledge AlignRK;
-    RetainedKnowledge DerefRK;
-    if (getKnowledgeForValue(
-            V, {Attribute::Dereferenceable, Attribute::Alignment}, AC,
-            [&](RetainedKnowledge RK, Instruction *Assume, auto) {
-              if (!isValidAssumeForContext(Assume, CtxI))
-                return false;
-              if (RK.AttrKind == Attribute::Alignment)
-                AlignRK = std::max(AlignRK, RK);
-              if (RK.AttrKind == Attribute::Dereferenceable)
-                DerefRK = std::max(DerefRK, RK);
-              if (AlignRK && DerefRK && AlignRK.ArgValue >= Alignment.value() &&
-                  DerefRK.ArgValue >= Size.getZExtValue())
-                return true; // We have found what we needed so we stop looking
-              return false; // Other assumes may have better information. so
-                            // keep looking
-            }))
-      return true;
-  }
-  /// TODO refactor this function to be able to search independently for
-  /// Dereferencability and Alignment requirements.
-
   // For GEPs, determine if the indexing lands within the allocated object.
   if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
     const Value *Base = GEP->getPointerOperand();
@@ -134,16 +77,41 @@ static bool isDereferenceableAndAlignedPointer(
         CtxI, AC, DT, TLI, Visited, MaxDepth);
   }
 
-  // For gc.relocate, look through relocations
-  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
-    return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
-                                              Alignment, Size, DL, CtxI, AC, DT,
-                                              TLI, Visited, MaxDepth);
-
-  if (const AddrSpaceCastOperator *ASC = dyn_cast<AddrSpaceCastOperator>(V))
-    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
-                                              Size, DL, CtxI, AC, DT, TLI,
-                                              Visited, MaxDepth);
-
+  // bitcast instructions are no-ops as far as dereferenceability is concerned.
+  if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
+    if (BC->getSrcTy()->isPointerTy())
+      return isDereferenceableAndAlignedPointer(
+          BC->getOperand(0), Alignment, Size, DL, CtxI, AC, DT, TLI,
+          Visited, MaxDepth);
+  }
+
+  // Recurse into both hands of select.
+  if (const SelectInst *Sel = dyn_cast<SelectInst>(V)) {
+    return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
+                                              Size, DL, CtxI, AC, DT, TLI,
+                                              Visited, MaxDepth) &&
+           isDereferenceableAndAlignedPointer(Sel->getFalseValue(), Alignment,
+                                              Size, DL, CtxI, AC, DT, TLI,
+                                              Visited, MaxDepth);
+  }
+
+  bool CheckForNonNull, CheckForFreed;
+  APInt KnownDerefBytes(Size.getBitWidth(),
+                        V->getPointerDereferenceableBytes(DL, CheckForNonNull,
+                                                          CheckForFreed));
+  if (KnownDerefBytes.getBoolValue() && KnownDerefBytes.uge(Size) &&
+      !CheckForFreed)
+    if (!CheckForNonNull || isKnownNonZero(V, DL, 0, AC, CtxI, DT)) {
+      // As we recursed through GEPs to get here, we've incrementally checked
+      // that each step advanced by a multiple of the alignment. If our base is
+      // properly aligned, then the original offset accessed must also be.
+      APInt Offset(DL.getTypeStoreSizeInBits(V->getType()), 0);
+      return isAligned(V, Offset, Alignment, DL);
+    }
+
+  /// TODO refactor this function to be able to search independently for
+  /// Dereferencability and Alignment requirements.
+
+
   if (const auto *Call = dyn_cast<CallBase>(V)) {
     if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))
@@ -178,6 +146,40 @@ static bool isDereferenceableAndAlignedPointer(
     }
   }
 
+  // For gc.relocate, look through relocations
+  if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
+    return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
+                                              Alignment, Size, DL, CtxI, AC, DT,
+                                              TLI, Visited, MaxDepth);
+
+  if (const AddrSpaceCastOperator *ASC = dyn_cast<AddrSpaceCastOperator>(V))
+    return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
+                                              Size, DL, CtxI, AC, DT, TLI,
+                                              Visited, MaxDepth);
+
+  if (CtxI) {
+    /// Look through assumes to see if both dereferencability and alignment can
+    /// be provent by an assume
+    RetainedKnowledge AlignRK;
+    RetainedKnowledge DerefRK;
+    if (getKnowledgeForValue(
+            V, {Attribute::Dereferenceable, Attribute::Alignment}, AC,
+            [&](RetainedKnowledge RK, Instruction *Assume, auto) {
+              if (!isValidAssumeForContext(Assume, CtxI))
+                return false;
+              if (RK.AttrKind == Attribute::Alignment)
+                AlignRK = std::max(AlignRK, RK);
+              if (RK.AttrKind == Attribute::Dereferenceable)
+                DerefRK = std::max(DerefRK, RK);
+              if (AlignRK && DerefRK && AlignRK.ArgValue >= Alignment.value() &&
+                  DerefRK.ArgValue >= Size.getZExtValue())
+                return true; // We have found what we needed so we stop looking
+              return false; // Other assumes may have better information. so
+                            // keep looking
+            }))
+      return true;
+  }
+
   // If we don't know, assume the worst.
   return false;
 }
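For orientation: this static helper backs the public query declared in llvm/Analysis/Loads.h, which callers use to decide whether a speculative load is safe. The sketch below shows a typical call; the parameter order is assumed to mirror the static helper in this diff (CtxI, AC, DT, TLI), but the exact public signature differs between LLVM versions, so treat it as an assumption rather than a copy of the header. The wrapper name canSpeculateLoad is invented for the example.

// Sketch only, assuming the public overload mirrors the static helper's
// parameter order; check Loads.h for the exact signature in your LLVM version.
#include "llvm/ADT/APInt.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Alignment.h"

using namespace llvm;

// Hypothetical helper: can a load of `Bytes` bytes through `Ptr` be speculated
// at the program point `CtxI`?
static bool canSpeculateLoad(const Value *Ptr, Align Alignment, uint64_t Bytes,
                             const DataLayout &DL, const Instruction *CtxI,
                             AssumptionCache *AC, const DominatorTree *DT) {
  // Size is measured in bytes and sized to the pointer's index width.
  APInt Size(DL.getIndexTypeSizeInBits(Ptr->getType()), Bytes);
  return isDereferenceableAndAlignedPointer(Ptr, Alignment, Size, DL, CtxI, AC,
                                            DT, /*TLI=*/nullptr);
}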