//== ArrayBoundCheckerV2.cpp ------------------------------------*- C++ -*--==//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file defines ArrayBoundCheckerV2, a path-sensitive checker that looks
// for out-of-bound array element accesses.
//
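// For example, given 'char buf[4]', an access like 'buf[4]' (one element past
// the end) or 'buf[-1]' (before the start of the buffer) is the kind of
// out-of-bounds access this checker is intended to flag.
//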
//===----------------------------------------------------------------------===//

#include "ClangSACheckers.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h"
#include "clang/AST/CharUnits.h"

using namespace clang;
using namespace ento;

namespace {
class ArrayBoundCheckerV2 :
    public Checker<check::Location> {
  mutable llvm::OwningPtr<BuiltinBug> BT;

  enum OOB_Kind { OOB_Precedes, OOB_Excedes };

  void reportOOB(CheckerContext &C, const GRState *errorState,
                 OOB_Kind kind) const;

public:
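  // Callback invoked by the analyzer core on every load from or store to the
  // location 'l'.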
  void checkLocation(SVal l, bool isLoad, CheckerContext &C) const;
};

// FIXME: Eventually replace RegionRawOffset with this class.
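// Represents the raw byte offset of a memory access relative to the start of
// its base region.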
class RegionRawOffsetV2 {
private:
  const SubRegion *baseRegion;
  SVal byteOffset;

  RegionRawOffsetV2()
    : baseRegion(0), byteOffset(UnknownVal()) {}

public:
  RegionRawOffsetV2(const SubRegion* base, SVal offset)
    : baseRegion(base), byteOffset(offset) {}

  NonLoc getByteOffset() const { return cast<NonLoc>(byteOffset); }
  const SubRegion *getRegion() const { return baseRegion; }

  static RegionRawOffsetV2 computeOffset(const GRState *state,
                                         SValBuilder &svalBuilder,
                                         SVal location);

  void dump() const;
  void dumpToStream(llvm::raw_ostream& os) const;
};
}

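// Return the first valid byte offset of 'region'.  This is zero for concrete
// regions; for symbolic regions the lower bound is currently unknown.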
static SVal computeExtentBegin(SValBuilder &svalBuilder,
                               const MemRegion *region) {
  while (true)
    switch (region->getKind()) {
      default:
        return svalBuilder.makeZeroArrayIndex();
      case MemRegion::SymbolicRegionKind:
        // FIXME: improve this later by tracking symbolic lower bounds
        // for symbolic regions.
        return UnknownVal();
      case MemRegion::ElementRegionKind:
        region = cast<SubRegion>(region)->getSuperRegion();
        continue;
    }
}

void ArrayBoundCheckerV2::checkLocation(SVal location, bool isLoad,
                                        CheckerContext &checkerContext) const {

  // NOTE: Instead of using GRState::assumeInBound(), we are prototyping
  // some new logic here that reasons directly about memory region extents.
  // Once that logic is more mature, we can bring it back to assumeInBound()
  // for all clients to use.
  //
  // The algorithm we are using here for bounds checking is to see if the
  // memory access is within the extent of the base region.  Since we
  // have some flexibility in defining the base region, we can achieve
  // various levels of conservatism in our buffer overflow checking.
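  //
  // For example, for an access 'p[i]' where 'p' points into 'char buf[10]',
  // the base region is the region for 'buf', the byte offset is the distance
  // of the access from the start of 'buf', and the extent is 10 bytes.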
  const GRState *state = checkerContext.getState();
  const GRState *originalState = state;

  SValBuilder &svalBuilder = checkerContext.getSValBuilder();
  const RegionRawOffsetV2 &rawOffset =
    RegionRawOffsetV2::computeOffset(state, svalBuilder, location);

  if (!rawOffset.getRegion())
    return;

  // CHECK LOWER BOUND: Is byteOffset < extent begin?  If so, we are doing a
  // load/store before the first valid offset in the memory region.

  SVal extentBegin = computeExtentBegin(svalBuilder, rawOffset.getRegion());

  if (isa<NonLoc>(extentBegin)) {
    SVal lowerBound
      = svalBuilder.evalBinOpNN(state, BO_LT, rawOffset.getByteOffset(),
                                cast<NonLoc>(extentBegin),
                                svalBuilder.getConditionType());

    NonLoc *lowerBoundToCheck = dyn_cast<NonLoc>(&lowerBound);
    if (!lowerBoundToCheck)
      return;

    const GRState *state_precedesLowerBound, *state_withinLowerBound;
    llvm::tie(state_precedesLowerBound, state_withinLowerBound) =
      state->assume(*lowerBoundToCheck);

    // Are we constrained enough to definitely precede the lower bound?
    if (state_precedesLowerBound && !state_withinLowerBound) {
      reportOOB(checkerContext, state_precedesLowerBound, OOB_Precedes);
      return;
    }

    // Otherwise, assume the constraint of the lower bound.
    assert(state_withinLowerBound);
    state = state_withinLowerBound;
  }

  do {
    // CHECK UPPER BOUND: Is byteOffset >= extent(baseRegion)?  If so,
    // we are doing a load/store after the last valid offset.
    DefinedOrUnknownSVal extentVal =
      rawOffset.getRegion()->getExtent(svalBuilder);
    if (!isa<NonLoc>(extentVal))
      break;

    SVal upperbound
      = svalBuilder.evalBinOpNN(state, BO_GE, rawOffset.getByteOffset(),
                                cast<NonLoc>(extentVal),
                                svalBuilder.getConditionType());

    NonLoc *upperboundToCheck = dyn_cast<NonLoc>(&upperbound);
    if (!upperboundToCheck)
      break;

    const GRState *state_exceedsUpperBound, *state_withinUpperBound;
    llvm::tie(state_exceedsUpperBound, state_withinUpperBound) =
      state->assume(*upperboundToCheck);

    // Are we constrained enough to definitely exceed the upper bound?
    if (state_exceedsUpperBound && !state_withinUpperBound) {
      reportOOB(checkerContext, state_exceedsUpperBound, OOB_Excedes);
      return;
    }

    assert(state_withinUpperBound);
    state = state_withinUpperBound;
  }
  while (false);

  if (state != originalState)
    checkerContext.generateNode(state);
}

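// Report an out-of-bounds access: generate a sink node for the error state
// and emit a bug report attached to it.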
void ArrayBoundCheckerV2::reportOOB(CheckerContext &checkerContext,
                                    const GRState *errorState,
                                    OOB_Kind kind) const {

  ExplodedNode *errorNode = checkerContext.generateSink(errorState);
  if (!errorNode)
    return;

  if (!BT)
    BT.reset(new BuiltinBug("Out-of-bound access"));

  // FIXME: These diagnostics are preliminary.  We should get far better
  // diagnostics for explaining buffer overruns.

  llvm::SmallString<256> buf;
  llvm::raw_svector_ostream os(buf);
  os << "Out of bound memory access "
     << (kind == OOB_Precedes ? "(accessed memory precedes memory block)"
                              : "(access exceeds upper limit of memory block)");

  checkerContext.EmitReport(new RangedBugReport(*BT, os.str(), errorNode));
}

void RegionRawOffsetV2::dump() const {
  dumpToStream(llvm::errs());
}

void RegionRawOffsetV2::dumpToStream(llvm::raw_ostream& os) const {
  os << "raw_offset_v2{" << getRegion() << ',' << getByteOffset() << '}';
}

// FIXME: Merge with the implementation of the same method in Store.cpp
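// Return true if 'Ty' is a complete type, i.e. not a record type whose
// definition has not been seen.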
static bool IsCompleteType(ASTContext &Ctx, QualType Ty) {
  if (const RecordType *RT = Ty->getAs<RecordType>()) {
    const RecordDecl *D = RT->getDecl();
    if (!D->getDefinition())
      return false;
  }

  return true;
}


// Lazily computes a value to be used by 'computeOffset'.  If 'val'
// is undefined, we lazily substitute '0'.  Otherwise, return 'val'.
static inline SVal getValue(SVal val, SValBuilder &svalBuilder) {
  return isa<UndefinedVal>(val) ? svalBuilder.makeArrayIndex(0) : val;
}

// Scale a base value by a scaling factor, and return the scaled
// value as an SVal.  Used by 'computeOffset'.
static inline SVal scaleValue(const GRState *state,
                              NonLoc baseVal, CharUnits scaling,
                              SValBuilder &sb) {
  return sb.evalBinOpNN(state, BO_Mul, baseVal,
                        sb.makeArrayIndex(scaling.getQuantity()),
                        sb.getArrayIndexType());
}

// Add an SVal to another, treating unknown and undefined values as
// summing to UnknownVal.  Used by 'computeOffset'.
static SVal addValue(const GRState *state, SVal x, SVal y,
                     SValBuilder &svalBuilder) {
  // We treat UnknownVals and UndefinedVals the same here because we
  // only care about computing offsets.
  if (x.isUnknownOrUndef() || y.isUnknownOrUndef())
    return UnknownVal();

  return svalBuilder.evalBinOpNN(state, BO_Add,
                                 cast<NonLoc>(x), cast<NonLoc>(y),
                                 svalBuilder.getArrayIndexType());
}

/// Compute a raw byte offset from a base region.  Used for array bounds
/// checking.
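/// For example, for an access 'buf[i]' where 'buf' is an array of 'int', the
/// result has the region for 'buf' as its base region and 'i * sizeof(int)'
/// as its byte offset.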
RegionRawOffsetV2 RegionRawOffsetV2::computeOffset(const GRState *state,
                                                   SValBuilder &svalBuilder,
                                                   SVal location)
{
  const MemRegion *region = location.getAsRegion();
  SVal offset = UndefinedVal();

  while (region) {
    switch (region->getKind()) {
      default: {
        if (const SubRegion *subReg = dyn_cast<SubRegion>(region)) {
          offset = getValue(offset, svalBuilder);
          if (!offset.isUnknownOrUndef())
            return RegionRawOffsetV2(subReg, offset);
        }
        return RegionRawOffsetV2();
      }
      case MemRegion::ElementRegionKind: {
        const ElementRegion *elemReg = cast<ElementRegion>(region);
        SVal index = elemReg->getIndex();
        if (!isa<NonLoc>(index))
          return RegionRawOffsetV2();
        QualType elemType = elemReg->getElementType();
        // If the element is an incomplete type, go no further.
        ASTContext &astContext = svalBuilder.getContext();
        if (!IsCompleteType(astContext, elemType))
          return RegionRawOffsetV2();

        // Update the offset.
        offset = addValue(state,
                          getValue(offset, svalBuilder),
                          scaleValue(state,
                                     cast<NonLoc>(index),
                                     astContext.getTypeSizeInChars(elemType),
                                     svalBuilder),
                          svalBuilder);

        if (offset.isUnknownOrUndef())
          return RegionRawOffsetV2();

        region = elemReg->getSuperRegion();
        continue;
      }
    }
  }
  return RegionRawOffsetV2();
}


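// Register this checker with the analyzer's CheckerManager.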
void ento::registerArrayBoundCheckerV2(CheckerManager &mgr) {
  mgr.registerChecker<ArrayBoundCheckerV2>();
}