Add corner case logic to BasicStoreManager and GRSimpleVals::EvalBinOp to enable

reasoning about OSCompareAndSwap32Barrier/OSCompareAndSwap64Barrier. Essentially
the address of a reference to a region (pointer-to-pointer) can be cast to
(int32_t*), and we need to handle the logic to convert the involved locations
back and forth from nonloc::LocAsInteger, nonloc::ConcreteInt, to Loc and
loc::ConcreteInt respectively. This adds some potentially suspect logic to
BasicStoreManager that allows the analyzer to reason about abuses of the C type
system. This should probably be refined, be ported over to RegionStoreManager,
and extended with "path-sensitive type checking" to flag bugs in clearly
incoherent code.

llvm-svn: 70382
This commit is contained in:
Ted Kremenek 2009-04-29 16:03:27 +00:00
parent e034868a36
commit 3941d22a98
2 changed files with 52 additions and 6 deletions

View File

@ -251,7 +251,7 @@ SVal BasicStoreManager::getLValueElement(const GRState* St, SVal Base,
return UnknownVal();
}
static bool isHigherOrderVoidPtr(QualType T, ASTContext &C) {
static bool isHigherOrderRawPtr(QualType T, ASTContext &C) {
bool foundPointer = false;
while (1) {
const PointerType *PT = T->getAsPointerType();
@ -259,6 +259,10 @@ static bool isHigherOrderVoidPtr(QualType T, ASTContext &C) {
if (!foundPointer)
return false;
// intptr_t* or intptr_t**, etc?
if (T->isIntegerType() && C.getTypeSize(T) == C.getTypeSize(C.VoidPtrTy))
return true;
QualType X = C.getCanonicalType(T).getUnqualifiedType();
return X == C.VoidTy;
}
@ -267,7 +271,7 @@ static bool isHigherOrderVoidPtr(QualType T, ASTContext &C) {
T = PT->getPointeeType();
}
}
SVal BasicStoreManager::Retrieve(const GRState* state, Loc loc, QualType T) {
if (isa<UnknownVal>(loc))
@ -281,12 +285,12 @@ SVal BasicStoreManager::Retrieve(const GRState* state, Loc loc, QualType T) {
const MemRegion* R = cast<loc::MemRegionVal>(loc).getRegion();
if (const TypedViewRegion *TR = dyn_cast<TypedViewRegion>(R)) {
// Just support void**, void***, etc., for now. This is needed
// to handle OSCompareAndSwapPtr().
// Just support void**, void***, intptr_t*, intptr_t**, etc., for now.
// This is needed to handle OSCompareAndSwapPtr() and friends.
ASTContext &Ctx = StateMgr.getContext();
QualType T = TR->getLValueType(Ctx);
if (!isHigherOrderVoidPtr(T, Ctx))
if (!isHigherOrderRawPtr(T, Ctx))
return UnknownVal();
// Otherwise, strip the views.
@ -321,6 +325,33 @@ Store BasicStoreManager::BindInternal(Store store, Loc loc, SVal V) {
case loc::MemRegionKind: {
const MemRegion* R = cast<loc::MemRegionVal>(loc).getRegion();
// Special case: handle store of pointer values (Loc) to pointers via
// a cast to intXX_t*, void*, etc. This is needed to handle
// OSCompareAndSwap32Barrier/OSCompareAndSwap64Barrier.
if (isa<Loc>(V) || isa<nonloc::LocAsInteger>(V))
if (const TypedViewRegion *TR = dyn_cast<TypedViewRegion>(R)) {
ASTContext &C = StateMgr.getContext();
QualType T = TR->getLValueType(C);
if (isHigherOrderRawPtr(T, C)) {
R = TR->removeViews();
if (nonloc::LocAsInteger *X = dyn_cast<nonloc::LocAsInteger>(&V)) {
// Only convert 'V' to a location iff the underlying region type
// is a location as well.
// FIXME: We are allowing a store of an arbitrary location to
// a pointer. We may wish to flag a type error here if the types
// are incompatible. This may also cause lots of breakage
// elsewhere. Food for thought.
if (const TypedRegion *TyR = dyn_cast<TypedRegion>(R)) {
if (TyR->isBoundable(C) &&
Loc::IsLocType(TyR->getRValueType(C)))
V = X->getLoc();
}
}
}
}
if (!(isa<VarRegion>(R) || isa<ObjCIvarRegion>(R)))
return store;

View File

@ -262,7 +262,22 @@ SVal GRSimpleVals::EvalBinOp(GRExprEngine& Eng, BinaryOperator::Opcode Op,
}
SVal GRSimpleVals::EvalBinOp(GRExprEngine& Eng, BinaryOperator::Opcode Op,
Loc L, NonLoc R) {
Loc L, NonLoc R) {
// Special case: 'R' is an integer that has the same width as a pointer and
// we are using the integer location in a comparison. Normally this cannot be
// triggered, but transfer functions like those for OSCompareAndSwap32Barrier
// can generate comparisons that trigger this code.
// FIXME: Are all locations guaranteed to have pointer width?
if (BinaryOperator::isEqualityOp(Op)) {
if (nonloc::ConcreteInt *RInt = dyn_cast<nonloc::ConcreteInt>(&R)) {
const llvm::APSInt &X = RInt->getValue();
ASTContext &C = Eng.getContext();
if (C.getTypeSize(C.VoidPtrTy) == X.getBitWidth())
return EvalBinOp(Eng, Op, L, loc::ConcreteInt(X));
}
}
// Delegate pointer arithmetic to store manager.
return Eng.getStoreManager().EvalBinOp(Op, L, R);
}