LEFT | RIGHT |
1 //===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===// | 1 //===-- AddressSanitizer.cpp - memory error detector ------------*- C++ -*-===// |
2 // | 2 // |
3 // The LLVM Compiler Infrastructure | 3 // The LLVM Compiler Infrastructure |
4 // | 4 // |
5 // This file is distributed under the University of Illinois Open Source | 5 // This file is distributed under the University of Illinois Open Source |
6 // License. See LICENSE.TXT for details. | 6 // License. See LICENSE.TXT for details. |
7 // | 7 // |
8 //===----------------------------------------------------------------------===// | 8 //===----------------------------------------------------------------------===// |
9 // | 9 // |
10 // This file is a part of AddressSanitizer, an address sanity checker. | 10 // This file is a part of AddressSanitizer, an address sanity checker. |
11 // Details of the algorithm: | 11 // Details of the algorithm: |
12 // http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm | 12 // http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm |
13 // | 13 // |
14 //===----------------------------------------------------------------------===// | 14 //===----------------------------------------------------------------------===// |
15 | 15 |
16 #define DEBUG_TYPE "asan" | 16 #define DEBUG_TYPE "asan" |
17 | 17 |
18 #include "BlackList.h" | 18 #include "BlackList.h" |
19 #include "llvm/Function.h" | 19 #include "llvm/Function.h" |
20 #include "llvm/IRBuilder.h" | 20 #include "llvm/IRBuilder.h" |
21 #include "llvm/InlineAsm.h" | 21 #include "llvm/InlineAsm.h" |
22 #include "llvm/IntrinsicInst.h" | 22 #include "llvm/IntrinsicInst.h" |
23 #include "llvm/LLVMContext.h" | 23 #include "llvm/LLVMContext.h" |
24 #include "llvm/Module.h" | 24 #include "llvm/Module.h" |
25 #include "llvm/Type.h" | 25 #include "llvm/Type.h" |
26 #include "llvm/ADT/ArrayRef.h" | 26 #include "llvm/ADT/ArrayRef.h" |
27 #include "llvm/ADT/DenseMap.h" | |
28 #include "llvm/ADT/OwningPtr.h" | 27 #include "llvm/ADT/OwningPtr.h" |
29 #include "llvm/ADT/SmallSet.h" | 28 #include "llvm/ADT/SmallSet.h" |
30 #include "llvm/ADT/SmallString.h" | 29 #include "llvm/ADT/SmallString.h" |
31 #include "llvm/ADT/SmallVector.h" | 30 #include "llvm/ADT/SmallVector.h" |
32 #include "llvm/ADT/StringExtras.h" | 31 #include "llvm/ADT/StringExtras.h" |
33 #include "llvm/ADT/Triple.h" | 32 #include "llvm/ADT/Triple.h" |
34 #include "llvm/Analysis/MemoryBuiltins.h" | 33 #include "llvm/Analysis/MemoryBuiltins.h" |
35 #include "llvm/Analysis/ScalarEvolution.h" | 34 #include "llvm/Analysis/ScalarEvolution.h" |
36 #include "llvm/Analysis/ValueTracking.h" | 35 #include "llvm/Analysis/ValueTracking.h" |
37 #include "llvm/Support/CommandLine.h" | 36 #include "llvm/Support/CommandLine.h" |
(...skipping 95 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
133 // and benchmarking the tool. | 132 // and benchmarking the tool. |
134 static cl::opt<bool> ClOpt("asan-opt", | 133 static cl::opt<bool> ClOpt("asan-opt", |
135 cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true)); | 134 cl::desc("Optimize instrumentation"), cl::Hidden, cl::init(true)); |
136 static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp", | 135 static cl::opt<bool> ClOptSameTemp("asan-opt-same-temp", |
137 cl::desc("Instrument the same temp just once"), cl::Hidden, | 136 cl::desc("Instrument the same temp just once"), cl::Hidden, |
138 cl::init(true)); | 137 cl::init(true)); |
139 static cl::opt<bool> ClOptGlobals("asan-opt-globals", | 138 static cl::opt<bool> ClOptGlobals("asan-opt-globals", |
140 cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true)); | 139 cl::desc("Don't instrument scalar globals"), cl::Hidden, cl::init(true)); |
141 static cl::opt<bool> ClOptKnownBounds("asan-opt-known-bounds", | 140 static cl::opt<bool> ClOptKnownBounds("asan-opt-known-bounds", |
142 cl::desc("Don't instrument accesses that are known to be in bounds"), | 141 cl::desc("Don't instrument accesses that are known to be in bounds"), |
143 cl::Hidden, cl::init(true)); | 142 cl::Hidden, cl::init(false)); |
144 | 143 |
145 // Debug flags. | 144 // Debug flags. |
146 static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, | 145 static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden, |
147 cl::init(0)); | 146 cl::init(0)); |
148 static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"), | 147 static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"), |
149 cl::Hidden, cl::init(0)); | 148 cl::Hidden, cl::init(0)); |
150 static cl::opt<std::string> ClDebugFunc("asan-debug-func", | 149 static cl::opt<std::string> ClDebugFunc("asan-debug-func", |
151 cl::Hidden, cl::desc("Debug func")); | 150 cl::Hidden, cl::desc("Debug func")); |
152 static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), | 151 static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"), |
153 cl::Hidden, cl::init(-1)); | 152 cl::Hidden, cl::init(-1)); |
154 static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug man inst"), | 153 static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug man inst"), |
155 cl::Hidden, cl::init(-1)); | 154 cl::Hidden, cl::init(-1)); |
156 | 155 |
157 namespace { | 156 namespace { |
158 | |
159 /// An object of this type is created while instrumenting every function. | |
160 struct AsanFunctionContext { | |
161 AsanFunctionContext(Function &Function) : F(Function) { } | |
162 | |
163 Function &F; | |
164 }; | |
165 | |
166 /// AddressSanitizer: instrument the code in module to find memory bugs. | 157 /// AddressSanitizer: instrument the code in module to find memory bugs. |
167 struct AddressSanitizer : public ModulePass { | 158 struct AddressSanitizer : public FunctionPass { |
168 AddressSanitizer(); | 159 AddressSanitizer(); |
169 virtual const char *getPassName() const; | 160 virtual const char *getPassName() const; |
170 void instrumentMop(AsanFunctionContext &AFC, Instruction *I); | 161 void instrumentMop(Instruction *I); |
171 void instrumentAddress(AsanFunctionContext &AFC, | 162 void instrumentAddress(Instruction *OrigIns, IRBuilder<> &IRB, |
172 Instruction *OrigIns, IRBuilder<> &IRB, | |
173 Value *Addr, uint32_t TypeSize, bool IsWrite); | 163 Value *Addr, uint32_t TypeSize, bool IsWrite); |
174 Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong, | 164 Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong, |
175 Value *ShadowValue, uint32_t TypeSize); | 165 Value *ShadowValue, uint32_t TypeSize); |
176 Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr, | 166 Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr, |
177 bool IsWrite, size_t AccessSizeIndex); | 167 bool IsWrite, size_t AccessSizeIndex); |
178 bool isMemoryAccessAlwaysInBounds(AsanFunctionContext &AFC, Value *Ptr, | 168 bool isMemoryAccessAlwaysInBounds(Value *Ptr, Value *Size); |
179 Value *Size); | 169 bool instrumentMemIntrinsic(MemIntrinsic *MI); |
180 bool instrumentMemIntrinsic(AsanFunctionContext &AFC, MemIntrinsic *MI); | 170 void instrumentMemIntrinsicParam(Instruction *OrigIns, Value *Addr, |
181 void instrumentMemIntrinsicParam(AsanFunctionContext &AFC, | |
182 Instruction *OrigIns, Value *Addr, | |
183 Value *Size, | 171 Value *Size, |
184 Instruction *InsertBefore, bool IsWrite); | 172 Instruction *InsertBefore, bool IsWrite); |
185 Value *memToShadow(Value *Shadow, IRBuilder<> &IRB); | 173 Value *memToShadow(Value *Shadow, IRBuilder<> &IRB); |
186 bool handleFunction(Module &M, Function &F); | 174 bool runOnFunction(Function &F); |
187 void createInitializerPoisonCalls(Module &M, | 175 void createInitializerPoisonCalls(Module &M, |
188 Value *FirstAddr, Value *LastAddr); | 176 Value *FirstAddr, Value *LastAddr); |
189 bool maybeInsertAsanInitAtFunctionEntry(Function &F); | 177 bool maybeInsertAsanInitAtFunctionEntry(Function &F); |
190 bool poisonStackInFunction(Module &M, Function &F); | 178 bool poisonStackInFunction(Function &F); |
191 virtual bool runOnModule(Module &M); | 179 virtual bool doInitialization(Module &M); |
| 180 virtual bool doFinalization(Module &M); |
192 bool insertGlobalRedzones(Module &M); | 181 bool insertGlobalRedzones(Module &M); |
193 static char ID; // Pass identification, replacement for typeid | 182 static char ID; // Pass identification, replacement for typeid |
194 | 183 |
195 virtual void getAnalysisUsage(AnalysisUsage &AU) const { | 184 virtual void getAnalysisUsage(AnalysisUsage &AU) const { |
196 AU.addRequired<DataLayout>(); | 185 AU.addRequired<DataLayout>(); |
197 AU.addRequired<ScalarEvolution>(); | 186 AU.addRequired<ScalarEvolution>(); |
198 } | 187 } |
199 | 188 |
200 private: | 189 private: |
201 uint64_t getAllocaSizeInBytes(AllocaInst *AI) { | 190 uint64_t getAllocaSizeInBytes(AllocaInst *AI) { |
(...skipping 12 matching lines...) Expand all Loading... |
214 | 203 |
215 Function *checkInterfaceFunction(Constant *FuncOrBitcast); | 204 Function *checkInterfaceFunction(Constant *FuncOrBitcast); |
216 bool ShouldInstrumentGlobal(GlobalVariable *G); | 205 bool ShouldInstrumentGlobal(GlobalVariable *G); |
217 void PoisonStack(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> IRB, | 206 void PoisonStack(const ArrayRef<AllocaInst*> &AllocaVec, IRBuilder<> IRB, |
218 Value *ShadowBase, bool DoPoison); | 207 Value *ShadowBase, bool DoPoison); |
219 bool LooksLikeCodeInBug11395(Instruction *I); | 208 bool LooksLikeCodeInBug11395(Instruction *I); |
220 void FindDynamicInitializers(Module &M); | 209 void FindDynamicInitializers(Module &M); |
221 bool HasDynamicInitializer(GlobalVariable *G); | 210 bool HasDynamicInitializer(GlobalVariable *G); |
222 | 211 |
223 LLVMContext *C; | 212 LLVMContext *C; |
| 213 ScalarEvolution *SE; |
224 DataLayout *TD; | 214 DataLayout *TD; |
225 uint64_t MappingOffset; | 215 uint64_t MappingOffset; |
226 int MappingScale; | 216 int MappingScale; |
227 size_t RedzoneSize; | 217 size_t RedzoneSize; |
228 int LongSize; | 218 int LongSize; |
229 Type *IntptrTy; | 219 Type *IntptrTy; |
230 Type *IntptrPtrTy; | 220 Type *IntptrPtrTy; |
231 Function *AsanCtorFunction; | 221 Function *AsanCtorFunction; |
232 Function *AsanInitFunction; | 222 Function *AsanInitFunction; |
| 223 Function *AsanStackMallocFunc, *AsanStackFreeFunc; |
| 224 Function *AsanHandleNoReturnFunc; |
233 Instruction *CtorInsertBefore; | 225 Instruction *CtorInsertBefore; |
234 OwningPtr<BlackList> BL; | 226 OwningPtr<BlackList> BL; |
235 // This array is indexed by AccessIsWrite and log2(AccessSize). | 227 // This array is indexed by AccessIsWrite and log2(AccessSize). |
236 Function *AsanErrorCallback[2][kNumberOfAccessSizes]; | 228 Function *AsanErrorCallback[2][kNumberOfAccessSizes]; |
237 InlineAsm *EmptyAsm; | 229 InlineAsm *EmptyAsm; |
238 SmallSet<GlobalValue*, 32> DynamicallyInitializedGlobals; | 230 SmallSet<GlobalValue*, 32> DynamicallyInitializedGlobals; |
239 DenseMap<Value*, uint64_t> InitialGVSize; | |
240 }; | 231 }; |
241 | 232 |
242 } // namespace | 233 } // namespace |
243 | 234 |
244 char AddressSanitizer::ID = 0; | 235 char AddressSanitizer::ID = 0; |
245 INITIALIZE_PASS(AddressSanitizer, "asan", | 236 INITIALIZE_PASS(AddressSanitizer, "asan", |
246 "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", | 237 "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", |
247 false, false) | 238 false, false) |
248 AddressSanitizer::AddressSanitizer() : ModulePass(ID) { } | 239 AddressSanitizer::AddressSanitizer() : FunctionPass(ID) { } |
249 ModulePass *llvm::createAddressSanitizerPass() { | 240 FunctionPass *llvm::createAddressSanitizerPass() { |
250 return new AddressSanitizer(); | 241 return new AddressSanitizer(); |
251 } | 242 } |
252 | 243 |
253 const char *AddressSanitizer::getPassName() const { | 244 const char *AddressSanitizer::getPassName() const { |
254 return "AddressSanitizer"; | 245 return "AddressSanitizer"; |
255 } | 246 } |
256 | 247 |
257 static size_t TypeSizeToSizeIndex(uint32_t TypeSize) { | 248 static size_t TypeSizeToSizeIndex(uint32_t TypeSize) { |
258 size_t Res = CountTrailingZeros_32(TypeSize / 8); | 249 size_t Res = CountTrailingZeros_32(TypeSize / 8); |
259 assert(Res < kNumberOfAccessSizes); | 250 assert(Res < kNumberOfAccessSizes); |
260 return Res; | 251 return Res; |
261 } | 252 } |
262 | 253 |
263 // Create a constant for Str so that we can pass it to the run-time lib. | 254 // Create a constant for Str so that we can pass it to the run-time lib. |
264 static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) { | 255 static GlobalVariable *createPrivateGlobalForString(Module &M, StringRef Str) { |
265 Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str); | 256 Constant *StrConst = ConstantDataArray::getString(M.getContext(), Str); |
266 return new GlobalVariable(M, StrConst->getType(), true, | 257 return new GlobalVariable(M, StrConst->getType(), true, |
267 GlobalValue::PrivateLinkage, StrConst, ""); | 258 GlobalValue::PrivateLinkage, StrConst, ""); |
268 } | |
269 | |
270 // Split the basic block and insert an if-then code. | |
271 // Before: | |
272 // Head | |
273 // Cmp | |
274 // Tail | |
275 // After: | |
276 // Head | |
277 // if (Cmp) | |
278 // ThenBlock | |
279 // Tail | |
280 // | |
281 // ThenBlock block is created and its terminator is returned. | |
282 // If Unreachable, ThenBlock is terminated with UnreachableInst, otherwise | |
283 // it is terminated with BranchInst to Tail. | |
284 static TerminatorInst *splitBlockAndInsertIfThen(Value *Cmp, bool Unreachable) { | |
285 Instruction *SplitBefore = cast<Instruction>(Cmp)->getNextNode(); | |
286 BasicBlock *Head = SplitBefore->getParent(); | |
287 BasicBlock *Tail = Head->splitBasicBlock(SplitBefore); | |
288 TerminatorInst *HeadOldTerm = Head->getTerminator(); | |
289 LLVMContext &C = Head->getParent()->getParent()->getContext(); | |
290 BasicBlock *ThenBlock = BasicBlock::Create(C, "", Head->getParent(), Tail); | |
291 TerminatorInst *CheckTerm; | |
292 if (Unreachable) | |
293 CheckTerm = new UnreachableInst(C, ThenBlock); | |
294 else | |
295 CheckTerm = BranchInst::Create(Tail, ThenBlock); | |
296 BranchInst *HeadNewTerm = | |
297 BranchInst::Create(/*ifTrue*/ThenBlock, /*ifFalse*/Tail, Cmp); | |
298 ReplaceInstWithInst(HeadOldTerm, HeadNewTerm); | |
299 return CheckTerm; | |
300 } | 259 } |
301 | 260 |
302 Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) { | 261 Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) { |
303 // Shadow >> scale | 262 // Shadow >> scale |
304 Shadow = IRB.CreateLShr(Shadow, MappingScale); | 263 Shadow = IRB.CreateLShr(Shadow, MappingScale); |
305 if (MappingOffset == 0) | 264 if (MappingOffset == 0) |
306 return Shadow; | 265 return Shadow; |
307 // (Shadow >> scale) | offset | 266 // (Shadow >> scale) | offset |
308 return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy, | 267 return IRB.CreateOr(Shadow, ConstantInt::get(IntptrTy, |
309 MappingOffset)); | 268 MappingOffset)); |
310 } | 269 } |
311 | 270 |
312 // Return true if the only argument is an allocation of a memory object that | 271 // Return true if the only argument is an allocation of a memory object that |
313 // can't be freed. | 272 // can't be freed nor resized. This disqualifies global variables with linkage |
| 273 // types that allow different-sized objects to be merged (for example weak_odr). |
314 static bool isSimpleMemoryObject(Value *V) { | 274 static bool isSimpleMemoryObject(Value *V) { |
315 if (AllocaInst *AI = dyn_cast<AllocaInst>(V)) | 275 if (AllocaInst *AI = dyn_cast<AllocaInst>(V)) |
316 return AI->isStaticAlloca(); | 276 return AI->isStaticAlloca(); |
317 | 277 |
318 if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) { | 278 if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) { |
319 if (GV->isDeclaration()) | 279 if (GV->isDeclaration()) |
320 return false; | 280 return false; |
321 return GV->hasExternalLinkage() || GV->hasLocalLinkage(); | 281 return GV->hasExternalLinkage() || GV->hasLocalLinkage(); |
322 } | 282 } |
323 | 283 |
324 if (Argument *AI = dyn_cast<Argument>(V)) | 284 if (Argument *AI = dyn_cast<Argument>(V)) |
325 return AI->hasByValAttr(); | 285 return AI->hasByValAttr(); |
326 | 286 |
327 return false; | 287 return false; |
328 } | 288 } |
329 | 289 |
330 // Return true if the memory access [Ptr, Ptr + Size) is always valid. | 290 // Return true if the memory access [Ptr, Ptr + Size) is always valid. |
331 bool AddressSanitizer::isMemoryAccessAlwaysInBounds(AsanFunctionContext &AFC, | 291 bool AddressSanitizer::isMemoryAccessAlwaysInBounds(Value *Ptr, Value *Size) { |
332 Value *Ptr, Value *Size) { | |
333 SmallVector <Value*, 4> Objects; | 292 SmallVector <Value*, 4> Objects; |
334 GetUnderlyingObjects(Ptr, Objects, TD); | 293 GetUnderlyingObjects(Ptr, Objects, TD); |
335 if (Objects.size() != 1) | 294 if (Objects.size() != 1) |
336 return false; | 295 return false; |
337 | 296 |
338 Value *ObjPtr = Objects[0]; | 297 Value *ObjPtr = Objects[0]; |
339 if (!isSimpleMemoryObject(ObjPtr)) | 298 if (!isSimpleMemoryObject(ObjPtr)) |
340 return false; | 299 return false; |
341 | 300 |
342 uint64_t ObjSize; | 301 uint64_t ObjSize; |
343 DenseMap<Value*, uint64_t>::const_iterator It = InitialGVSize.find(ObjPtr); | 302 bool FoundSize = getObjectSize(ObjPtr, ObjSize, TD, NULL, false); |
344 if (It != InitialGVSize.end()) { | 303 assert(FoundSize && "Simple memory objects should have known size."); |
345 // This is a global variable with a redzone. It->second is the initial size. | |
346 ObjSize = It->second; | |
347 } else { | |
348 assert(getObjectSize(ObjPtr, ObjSize, TD, NULL, /*RoundToAlign=*/false)); | |
349 } | |
350 Value *ObjSizeValue = ConstantInt::get(Size->getType(), ObjSize); | 304 Value *ObjSizeValue = ConstantInt::get(Size->getType(), ObjSize); |
351 | |
352 ScalarEvolution *SE = &getAnalysis<ScalarEvolution>(AFC.F); | |
353 SE->setDataLayout(TD); | |
354 | 305 |
355 const SCEV *AccessPtrSCEV = SE->getSCEV(Ptr); | 306 const SCEV *AccessPtrSCEV = SE->getSCEV(Ptr); |
356 const SCEV *ObjPtrSCEV = SE->getSCEV(ObjPtr); | 307 const SCEV *ObjPtrSCEV = SE->getSCEV(ObjPtr); |
357 const SCEV *Offset = SE->getMinusSCEV(AccessPtrSCEV, ObjPtrSCEV); | 308 const SCEV *Offset = SE->getMinusSCEV(AccessPtrSCEV, ObjPtrSCEV); |
358 const SCEV *ObjSizeSCEV = SE->getSCEV(ObjSizeValue); | 309 const SCEV *ObjSizeSCEV = SE->getSCEV(ObjSizeValue); |
359 Offset = SE->getTruncateOrZeroExtend(Offset, ObjSizeSCEV->getType()); | |
360 // Is the access known to start before or at the object's end? | 310 // Is the access known to start before or at the object's end? |
361 if (!SE->isKnownPredicate(ICmpInst::ICMP_ULE, Offset, ObjSizeSCEV)) | 311 if (!SE->isKnownPredicate(ICmpInst::ICMP_ULE, Offset, ObjSizeSCEV)) |
362 return false; | 312 return false; |
363 | 313 |
364 const SCEV *RemainingRoom = SE->getMinusSCEV(ObjSizeSCEV, Offset); | 314 const SCEV *RemainingRoom = SE->getMinusSCEV(ObjSizeSCEV, Offset); |
365 const SCEV *AccessSizeSCEV = SE->getSCEV(Size); | 315 const SCEV *AccessSizeSCEV = SE->getSCEV(Size); |
366 // Is the access known to end before before or at the object's end? | 316 // Is the access known to end before before or at the object's end? |
367 if (!SE->isKnownPredicate(ICmpInst::ICMP_UGE, RemainingRoom, AccessSizeSCEV)) | 317 if (!SE->isKnownPredicate(ICmpInst::ICMP_UGE, RemainingRoom, AccessSizeSCEV)) |
368 return false; | 318 return false; |
369 | 319 |
370 return true; | 320 return true; |
371 } | 321 } |
372 | 322 |
373 void AddressSanitizer::instrumentMemIntrinsicParam( | 323 void AddressSanitizer::instrumentMemIntrinsicParam( |
374 AsanFunctionContext &AFC, Instruction *OrigIns, | 324 Instruction *OrigIns, |
375 Value *Addr, Value *Size, Instruction *InsertBefore, bool IsWrite) { | 325 Value *Addr, Value *Size, Instruction *InsertBefore, bool IsWrite) { |
376 // Do we need to check it at run time? | 326 // Do we need to check it at run time? |
377 if (ClOpt && ClOptKnownBounds) { | 327 if (ClOpt && ClOptKnownBounds && isMemoryAccessAlwaysInBounds(Addr, Size)) |
378 if (isMemoryAccessAlwaysInBounds(AFC, Addr, Size)) | |
379 return; | 328 return; |
380 } | |
381 | 329 |
382 // Check the first byte. | 330 // Check the first byte. |
383 { | 331 { |
384 IRBuilder<> IRB(InsertBefore); | 332 IRBuilder<> IRB(InsertBefore); |
385 instrumentAddress(AFC, OrigIns, IRB, Addr, 8, IsWrite); | 333 instrumentAddress(OrigIns, IRB, Addr, 8, IsWrite); |
386 } | 334 } |
387 // Check the last byte. | 335 // Check the last byte. |
388 { | 336 { |
389 IRBuilder<> IRB(InsertBefore); | 337 IRBuilder<> IRB(InsertBefore); |
390 Value *SizeMinusOne = IRB.CreateSub( | 338 Value *SizeMinusOne = IRB.CreateSub( |
391 Size, ConstantInt::get(Size->getType(), 1)); | 339 Size, ConstantInt::get(Size->getType(), 1)); |
392 SizeMinusOne = IRB.CreateIntCast(SizeMinusOne, IntptrTy, false); | 340 SizeMinusOne = IRB.CreateIntCast(SizeMinusOne, IntptrTy, false); |
393 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy); | 341 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy); |
394 Value *AddrPlusSizeMinisOne = IRB.CreateAdd(AddrLong, SizeMinusOne); | 342 Value *AddrPlusSizeMinisOne = IRB.CreateAdd(AddrLong, SizeMinusOne); |
395 instrumentAddress(AFC, OrigIns, IRB, AddrPlusSizeMinisOne, 8, IsWrite); | 343 instrumentAddress(OrigIns, IRB, AddrPlusSizeMinisOne, 8, IsWrite); |
396 } | 344 } |
397 } | 345 } |
398 | 346 |
399 // Instrument memset/memmove/memcpy | 347 // Instrument memset/memmove/memcpy |
400 bool AddressSanitizer::instrumentMemIntrinsic(AsanFunctionContext &AFC, | 348 bool AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) { |
401 MemIntrinsic *MI) { | |
402 Value *Dst = MI->getDest(); | 349 Value *Dst = MI->getDest(); |
403 MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI); | 350 MemTransferInst *MemTran = dyn_cast<MemTransferInst>(MI); |
404 Value *Src = MemTran ? MemTran->getSource() : 0; | 351 Value *Src = MemTran ? MemTran->getSource() : 0; |
405 Value *Length = MI->getLength(); | 352 Value *Length = MI->getLength(); |
406 | 353 |
407 Constant *ConstLength = dyn_cast<Constant>(Length); | 354 Constant *ConstLength = dyn_cast<Constant>(Length); |
408 Instruction *InsertBefore = MI; | 355 Instruction *InsertBefore = MI; |
409 if (ConstLength) { | 356 if (ConstLength) { |
410 if (ConstLength->isNullValue()) return false; | 357 if (ConstLength->isNullValue()) return false; |
411 } else { | 358 } else { |
412 // The size is not a constant so it could be zero -- check at run-time. | 359 // The size is not a constant so it could be zero -- check at run-time. |
413 IRBuilder<> IRB(InsertBefore); | 360 IRBuilder<> IRB(InsertBefore); |
414 | 361 |
415 Value *Cmp = IRB.CreateICmpNE(Length, | 362 Value *Cmp = IRB.CreateICmpNE(Length, |
416 Constant::getNullValue(Length->getType())); | 363 Constant::getNullValue(Length->getType())); |
417 InsertBefore = splitBlockAndInsertIfThen(Cmp, false); | 364 InsertBefore = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false); |
418 } | 365 } |
419 | 366 |
420 instrumentMemIntrinsicParam(AFC, MI, Dst, Length, InsertBefore, true); | 367 instrumentMemIntrinsicParam(MI, Dst, Length, InsertBefore, true); |
421 if (Src) | 368 if (Src) |
422 instrumentMemIntrinsicParam(AFC, MI, Src, Length, InsertBefore, false); | 369 instrumentMemIntrinsicParam(MI, Src, Length, InsertBefore, false); |
423 return true; | 370 return true; |
424 } | 371 } |
425 | 372 |
426 // If I is an interesting memory access, return the PointerOperand | 373 // If I is an interesting memory access, return the PointerOperand |
427 // and set IsWrite. Otherwise return NULL. | 374 // and set IsWrite. Otherwise return NULL. |
428 static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) { | 375 static Value *isInterestingMemoryAccess(Instruction *I, bool *IsWrite) { |
429 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { | 376 if (LoadInst *LI = dyn_cast<LoadInst>(I)) { |
430 if (!ClInstrumentReads) return NULL; | 377 if (!ClInstrumentReads) return NULL; |
431 *IsWrite = false; | 378 *IsWrite = false; |
432 return LI->getPointerOperand(); | 379 return LI->getPointerOperand(); |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
466 | 413 |
467 GlobalVariable *G = cast<GlobalVariable>(VG); | 414 GlobalVariable *G = cast<GlobalVariable>(VG); |
468 DynamicallyInitializedGlobals.insert(G); | 415 DynamicallyInitializedGlobals.insert(G); |
469 } | 416 } |
470 } | 417 } |
471 // Returns true if a global variable is initialized dynamically in this TU. | 418 // Returns true if a global variable is initialized dynamically in this TU. |
472 bool AddressSanitizer::HasDynamicInitializer(GlobalVariable *G) { | 419 bool AddressSanitizer::HasDynamicInitializer(GlobalVariable *G) { |
473 return DynamicallyInitializedGlobals.count(G); | 420 return DynamicallyInitializedGlobals.count(G); |
474 } | 421 } |
475 | 422 |
476 void AddressSanitizer::instrumentMop(AsanFunctionContext &AFC, Instruction *I) { | 423 void AddressSanitizer::instrumentMop(Instruction *I) { |
477 bool IsWrite = false; | 424 bool IsWrite = false; |
478 Value *Addr = isInterestingMemoryAccess(I, &IsWrite); | 425 Value *Addr = isInterestingMemoryAccess(I, &IsWrite); |
479 assert(Addr); | 426 assert(Addr); |
480 if (ClOpt && ClOptGlobals) { | 427 if (ClOpt && ClOptGlobals) { |
481 if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) { | 428 if (GlobalVariable *G = dyn_cast<GlobalVariable>(Addr)) { |
482 // If initialization order checking is disabled, a simple access to a | 429 // If initialization order checking is disabled, a simple access to a |
483 // dynamically initialized global is always valid. | 430 // dynamically initialized global is always valid. |
484 if (!ClInitializers) | 431 if (!ClInitializers) |
485 return; | 432 return; |
486 // If a global variable does not have dynamic initialization we don't | 433 // If a global variable does not have dynamic initialization we don't |
(...skipping 12 matching lines...) Expand all Loading... |
499 assert(OrigTy->isSized()); | 446 assert(OrigTy->isSized()); |
500 uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy); | 447 uint32_t TypeSize = TD->getTypeStoreSizeInBits(OrigTy); |
501 | 448 |
502 if (TypeSize != 8 && TypeSize != 16 && | 449 if (TypeSize != 8 && TypeSize != 16 && |
503 TypeSize != 32 && TypeSize != 64 && TypeSize != 128) { | 450 TypeSize != 32 && TypeSize != 64 && TypeSize != 128) { |
504 // Ignore all unusual sizes. | 451 // Ignore all unusual sizes. |
505 return; | 452 return; |
506 } | 453 } |
507 | 454 |
508 if (ClOpt && ClOptKnownBounds) { | 455 if (ClOpt && ClOptKnownBounds) { |
509 Type *PtrSizeTy = TD->getIntPtrType(OrigPtrTy->getContext()); | 456 Type *PtrSizeTy = TD->getIntPtrType(OrigPtrTy); |
510 Value *Size = ConstantInt::get(PtrSizeTy, TypeSize / 8); | 457 Value *Size = ConstantInt::get(PtrSizeTy, TypeSize / 8); |
511 if (isMemoryAccessAlwaysInBounds(AFC, Addr, Size)) | 458 if (isMemoryAccessAlwaysInBounds(Addr, Size)) |
512 return; | 459 return; |
513 } | 460 } |
514 | 461 |
515 IRBuilder<> IRB(I); | 462 IRBuilder<> IRB(I); |
516 instrumentAddress(AFC, I, IRB, Addr, TypeSize, IsWrite); | 463 instrumentAddress(I, IRB, Addr, TypeSize, IsWrite); |
517 } | 464 } |
518 | 465 |
519 // Validate the result of Module::getOrInsertFunction called for an interface | 466 // Validate the result of Module::getOrInsertFunction called for an interface |
520 // function of AddressSanitizer. If the instrumented module defines a function | 467 // function of AddressSanitizer. If the instrumented module defines a function |
521 // with the same name, their prototypes must match, otherwise | 468 // with the same name, their prototypes must match, otherwise |
522 // getOrInsertFunction returns a bitcast. | 469 // getOrInsertFunction returns a bitcast. |
523 Function *AddressSanitizer::checkInterfaceFunction(Constant *FuncOrBitcast) { | 470 Function *AddressSanitizer::checkInterfaceFunction(Constant *FuncOrBitcast) { |
524 if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast); | 471 if (isa<Function>(FuncOrBitcast)) return cast<Function>(FuncOrBitcast); |
525 FuncOrBitcast->dump(); | 472 FuncOrBitcast->dump(); |
526 report_fatal_error("trying to redefine an AddressSanitizer " | 473 report_fatal_error("trying to redefine an AddressSanitizer " |
(...skipping 24 matching lines...) Expand all Loading... |
551 if (TypeSize / 8 > 1) | 498 if (TypeSize / 8 > 1) |
552 LastAccessedByte = IRB.CreateAdd( | 499 LastAccessedByte = IRB.CreateAdd( |
553 LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1)); | 500 LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1)); |
554 // (uint8_t) ((Addr & (Granularity-1)) + size - 1) | 501 // (uint8_t) ((Addr & (Granularity-1)) + size - 1) |
555 LastAccessedByte = IRB.CreateIntCast( | 502 LastAccessedByte = IRB.CreateIntCast( |
556 LastAccessedByte, ShadowValue->getType(), false); | 503 LastAccessedByte, ShadowValue->getType(), false); |
557 // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue | 504 // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue |
558 return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue); | 505 return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue); |
559 } | 506 } |
560 | 507 |
561 void AddressSanitizer::instrumentAddress(AsanFunctionContext &AFC, | 508 void AddressSanitizer::instrumentAddress(Instruction *OrigIns, |
562 Instruction *OrigIns, | |
563 IRBuilder<> &IRB, Value *Addr, | 509 IRBuilder<> &IRB, Value *Addr, |
564 uint32_t TypeSize, bool IsWrite) { | 510 uint32_t TypeSize, bool IsWrite) { |
565 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy); | 511 Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy); |
566 | 512 |
567 Type *ShadowTy = IntegerType::get( | 513 Type *ShadowTy = IntegerType::get( |
568 *C, std::max(8U, TypeSize >> MappingScale)); | 514 *C, std::max(8U, TypeSize >> MappingScale)); |
569 Type *ShadowPtrTy = PointerType::get(ShadowTy, 0); | 515 Type *ShadowPtrTy = PointerType::get(ShadowTy, 0); |
570 Value *ShadowPtr = memToShadow(AddrLong, IRB); | 516 Value *ShadowPtr = memToShadow(AddrLong, IRB); |
571 Value *CmpVal = Constant::getNullValue(ShadowTy); | 517 Value *CmpVal = Constant::getNullValue(ShadowTy); |
572 Value *ShadowValue = IRB.CreateLoad( | 518 Value *ShadowValue = IRB.CreateLoad( |
573 IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy)); | 519 IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy)); |
574 | 520 |
575 Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal); | 521 Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal); |
576 size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize); | 522 size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize); |
577 size_t Granularity = 1 << MappingScale; | 523 size_t Granularity = 1 << MappingScale; |
578 TerminatorInst *CrashTerm = 0; | 524 TerminatorInst *CrashTerm = 0; |
579 | 525 |
580 if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) { | 526 if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) { |
581 TerminatorInst *CheckTerm = splitBlockAndInsertIfThen(Cmp, false); | 527 TerminatorInst *CheckTerm = |
| 528 SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), false); |
582 assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional()); | 529 assert(dyn_cast<BranchInst>(CheckTerm)->isUnconditional()); |
583 BasicBlock *NextBB = CheckTerm->getSuccessor(0); | 530 BasicBlock *NextBB = CheckTerm->getSuccessor(0); |
584 IRB.SetInsertPoint(CheckTerm); | 531 IRB.SetInsertPoint(CheckTerm); |
585 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize); | 532 Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize); |
586 BasicBlock *CrashBlock = BasicBlock::Create(*C, "", &AFC.F, NextBB); | 533 BasicBlock *CrashBlock = |
| 534 BasicBlock::Create(*C, "", NextBB->getParent(), NextBB); |
587 CrashTerm = new UnreachableInst(*C, CrashBlock); | 535 CrashTerm = new UnreachableInst(*C, CrashBlock); |
588 BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2); | 536 BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2); |
589 ReplaceInstWithInst(CheckTerm, NewTerm); | 537 ReplaceInstWithInst(CheckTerm, NewTerm); |
590 } else { | 538 } else { |
591 CrashTerm = splitBlockAndInsertIfThen(Cmp, true); | 539 CrashTerm = SplitBlockAndInsertIfThen(cast<Instruction>(Cmp), true); |
592 } | 540 } |
593 | 541 |
594 Instruction *Crash = | 542 Instruction *Crash = |
595 generateCrashCode(CrashTerm, AddrLong, IsWrite, AccessSizeIndex); | 543 generateCrashCode(CrashTerm, AddrLong, IsWrite, AccessSizeIndex); |
596 Crash->setDebugLoc(OrigIns->getDebugLoc()); | 544 Crash->setDebugLoc(OrigIns->getDebugLoc()); |
597 } | 545 } |
598 | 546 |
599 void AddressSanitizer::createInitializerPoisonCalls(Module &M, | 547 void AddressSanitizer::createInitializerPoisonCalls(Module &M, |
600 Value *FirstAddr, | 548 Value *FirstAddr, |
601 Value *LastAddr) { | 549 Value *LastAddr) { |
(...skipping 22 matching lines...) Expand all Loading... |
624 for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end(); | 572 for (Function::iterator I = GlobalInit->begin(), E = GlobalInit->end(); |
625 I != E; ++I) { | 573 I != E; ++I) { |
626 if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) { | 574 if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) { |
627 CallInst::Create(AsanUnpoisonGlobals, "", RI); | 575 CallInst::Create(AsanUnpoisonGlobals, "", RI); |
628 } | 576 } |
629 } | 577 } |
630 } | 578 } |
631 | 579 |
632 bool AddressSanitizer::ShouldInstrumentGlobal(GlobalVariable *G) { | 580 bool AddressSanitizer::ShouldInstrumentGlobal(GlobalVariable *G) { |
633 Type *Ty = cast<PointerType>(G->getType())->getElementType(); | 581 Type *Ty = cast<PointerType>(G->getType())->getElementType(); |
634 DEBUG(dbgs() << "GLOBAL: " << *G); | 582 DEBUG(dbgs() << "GLOBAL: " << *G << "\n"); |
635 | 583 |
636 if (BL->isIn(*G)) return false; | 584 if (BL->isIn(*G)) return false; |
637 if (!Ty->isSized()) return false; | 585 if (!Ty->isSized()) return false; |
638 if (!G->hasInitializer()) return false; | 586 if (!G->hasInitializer()) return false; |
639 // Touch only those globals that will not be defined in other modules. | 587 // Touch only those globals that will not be defined in other modules. |
640 // Don't handle ODR type linkages since other modules may be built w/o asan. | 588 // Don't handle ODR type linkages since other modules may be built w/o asan. |
641 if (G->getLinkage() != GlobalVariable::ExternalLinkage && | 589 if (G->getLinkage() != GlobalVariable::ExternalLinkage && |
642 G->getLinkage() != GlobalVariable::PrivateLinkage && | 590 G->getLinkage() != GlobalVariable::PrivateLinkage && |
643 G->getLinkage() != GlobalVariable::InternalLinkage) | 591 G->getLinkage() != GlobalVariable::InternalLinkage) |
644 return false; | 592 return false; |
(...skipping 101 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
746 DescriptionOfGlobal += M.getModuleIdentifier(); | 694 DescriptionOfGlobal += M.getModuleIdentifier(); |
747 DescriptionOfGlobal += ")"; | 695 DescriptionOfGlobal += ")"; |
748 GlobalVariable *Name = createPrivateGlobalForString(M, DescriptionOfGlobal); | 696 GlobalVariable *Name = createPrivateGlobalForString(M, DescriptionOfGlobal); |
749 | 697 |
750 // Create a new global variable with enough space for a redzone. | 698 // Create a new global variable with enough space for a redzone. |
751 GlobalVariable *NewGlobal = new GlobalVariable( | 699 GlobalVariable *NewGlobal = new GlobalVariable( |
752 M, NewTy, G->isConstant(), G->getLinkage(), | 700 M, NewTy, G->isConstant(), G->getLinkage(), |
753 NewInitializer, "", G, G->getThreadLocalMode()); | 701 NewInitializer, "", G, G->getThreadLocalMode()); |
754 NewGlobal->copyAttributesFrom(G); | 702 NewGlobal->copyAttributesFrom(G); |
755 NewGlobal->setAlignment(RedzoneSize); | 703 NewGlobal->setAlignment(RedzoneSize); |
756 InitialGVSize[NewGlobal] = SizeInBytes; | |
757 | 704 |
758 Value *Indices2[2]; | 705 Value *Indices2[2]; |
759 Indices2[0] = IRB.getInt32(0); | 706 Indices2[0] = IRB.getInt32(0); |
760 Indices2[1] = IRB.getInt32(0); | 707 Indices2[1] = IRB.getInt32(0); |
761 | 708 |
762 G->replaceAllUsesWith( | 709 G->replaceAllUsesWith( |
763 ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true)); | 710 ConstantExpr::getGetElementPtr(NewGlobal, Indices2, true)); |
764 NewGlobal->takeName(G); | 711 NewGlobal->takeName(G); |
765 G->eraseFromParent(); | 712 G->eraseFromParent(); |
766 | 713 |
767 Initializers[i] = ConstantStruct::get( | 714 Initializers[i] = ConstantStruct::get( |
768 GlobalStructTy, | 715 GlobalStructTy, |
769 ConstantExpr::getPointerCast(NewGlobal, IntptrTy), | 716 ConstantExpr::getPointerCast(NewGlobal, IntptrTy), |
770 ConstantInt::get(IntptrTy, SizeInBytes), | 717 ConstantInt::get(IntptrTy, SizeInBytes), |
771 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize), | 718 ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize), |
772 ConstantExpr::getPointerCast(Name, IntptrTy), | 719 ConstantExpr::getPointerCast(Name, IntptrTy), |
773 ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer), | 720 ConstantInt::get(IntptrTy, GlobalHasDynamicInitializer), |
774 NULL); | 721 NULL); |
775 | 722 |
776 // Populate the first and last globals declared in this TU. | 723 // Populate the first and last globals declared in this TU. |
777 if (ClInitializers && GlobalHasDynamicInitializer) { | 724 if (ClInitializers && GlobalHasDynamicInitializer) { |
778 LastDynamic = ConstantExpr::getPointerCast(NewGlobal, IntptrTy); | 725 LastDynamic = ConstantExpr::getPointerCast(NewGlobal, IntptrTy); |
779 if (FirstDynamic == 0) | 726 if (FirstDynamic == 0) |
780 FirstDynamic = LastDynamic; | 727 FirstDynamic = LastDynamic; |
781 } | 728 } |
782 | 729 |
783 DEBUG(dbgs() << "NEW GLOBAL:\n" << *NewGlobal); | 730 DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n"); |
784 } | 731 } |
785 | 732 |
786 ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n); | 733 ArrayType *ArrayOfGlobalStructTy = ArrayType::get(GlobalStructTy, n); |
787 GlobalVariable *AllGlobals = new GlobalVariable( | 734 GlobalVariable *AllGlobals = new GlobalVariable( |
788 M, ArrayOfGlobalStructTy, false, GlobalVariable::PrivateLinkage, | 735 M, ArrayOfGlobalStructTy, false, GlobalVariable::PrivateLinkage, |
789 ConstantArray::get(ArrayOfGlobalStructTy, Initializers), ""); | 736 ConstantArray::get(ArrayOfGlobalStructTy, Initializers), ""); |
790 | 737 |
791 // Create calls for poisoning before initializers run and unpoisoning after. | 738 // Create calls for poisoning before initializers run and unpoisoning after. |
792 if (ClInitializers && FirstDynamic && LastDynamic) | 739 if (ClInitializers && FirstDynamic && LastDynamic) |
793 createInitializerPoisonCalls(M, FirstDynamic, LastDynamic); | 740 createInitializerPoisonCalls(M, FirstDynamic, LastDynamic); |
(...skipping 23 matching lines...) Expand all Loading... |
817 IRB_Dtor.CreateCall2(AsanUnregisterGlobals, | 764 IRB_Dtor.CreateCall2(AsanUnregisterGlobals, |
818 IRB.CreatePointerCast(AllGlobals, IntptrTy), | 765 IRB.CreatePointerCast(AllGlobals, IntptrTy), |
819 ConstantInt::get(IntptrTy, n)); | 766 ConstantInt::get(IntptrTy, n)); |
820 appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndCtorPriority); | 767 appendToGlobalDtors(M, AsanDtorFunction, kAsanCtorAndCtorPriority); |
821 | 768 |
822 DEBUG(dbgs() << M); | 769 DEBUG(dbgs() << M); |
823 return true; | 770 return true; |
824 } | 771 } |
825 | 772 |
// virtual
// Module-level initialization: sets up all per-module state the pass needs
// before any function is instrumented. Creates the module ctor that calls
// __asan_init, declares the __asan_report*/stack/noreturn runtime callbacks,
// and computes the shadow-mapping parameters (offset, scale, redzone size).
// Returns true when the module was modified (ctor added), false when no
// DataLayout is available and the pass bails out entirely.
bool AddressSanitizer::doInitialization(Module &M) {
  // Initialize the private fields. No one has accessed them before.
  TD = getAnalysisIfAvailable<DataLayout>();

  // Without DataLayout we cannot compute sizes/offsets; do nothing.
  if (!TD)
    return false;
  BL.reset(new BlackList(ClBlackListFile));

  C = &(M.getContext());
  LongSize = TD->getPointerSizeInBits(0);
  IntptrTy = Type::getIntNTy(*C, LongSize);
  IntptrPtrTy = PointerType::get(IntptrTy, 0);

  // Synthesize an internal module constructor; instrumentation code that must
  // run at startup is inserted before its terminating return.
  AsanCtorFunction = Function::Create(
      FunctionType::get(Type::getVoidTy(*C), false),
      GlobalValue::InternalLinkage, kAsanModuleCtorName, &M);
  BasicBlock *AsanCtorBB = BasicBlock::Create(*C, "", AsanCtorFunction);
  CtorInsertBefore = ReturnInst::Create(*C, AsanCtorBB);

  // call __asan_init in the module ctor.
  IRBuilder<> IRB(CtorInsertBefore);
  AsanInitFunction = checkInterfaceFunction(
      M.getOrInsertFunction(kAsanInitName, IRB.getVoidTy(), NULL));
  AsanInitFunction->setLinkage(Function::ExternalLinkage);
  IRB.CreateCall(AsanInitFunction);

  // Create __asan_report* callbacks.
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      // IsWrite and TypeSize are encoded in the function name.
      std::string FunctionName = std::string(kAsanReportErrorTemplate) +
          (AccessIsWrite ? "store" : "load") + itostr(1 << AccessSizeIndex);
      // If we are merging crash callbacks, they have two parameters.
      AsanErrorCallback[AccessIsWrite][AccessSizeIndex] = cast<Function>(
          M.getOrInsertFunction(FunctionName, IRB.getVoidTy(), IntptrTy, NULL));
    }
  }

  // Declare the stack poisoning / noreturn runtime hooks once per module so
  // per-function instrumentation can reuse them.
  AsanStackMallocFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackMallocName, IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanStackFreeFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanStackFreeName, IRB.getVoidTy(),
      IntptrTy, IntptrTy, IntptrTy, NULL));
  AsanHandleNoReturnFunc = checkInterfaceFunction(M.getOrInsertFunction(
      kAsanHandleNoReturnName, IRB.getVoidTy(), NULL));

  // We insert an empty inline asm after __asan_report* to avoid callback merge.
  EmptyAsm = InlineAsm::get(FunctionType::get(IRB.getVoidTy(), false),
                            StringRef(""), StringRef(""),
                            /*hasSideEffects=*/true);

  llvm::Triple targetTriple(M.getTargetTriple());
  bool isAndroid = targetTriple.getEnvironment() == llvm::Triple::Android;

  // Pick the shadow offset by target, unless overridden on the command line.
  MappingOffset = isAndroid ? kDefaultShadowOffsetAndroid :
    (LongSize == 32 ? kDefaultShadowOffset32 : kDefaultShadowOffset64);
  if (ClMappingOffsetLog >= 0) {
    if (ClMappingOffsetLog == 0) {
      // special case: log of 0 means a zero offset, not an offset of 1.
      MappingOffset = 0;
    } else {
      MappingOffset = 1ULL << ClMappingOffsetLog;
    }
  }
  MappingScale = kDefaultShadowScale;
  if (ClMappingScale) {
    MappingScale = ClMappingScale;
  }
  // Redzone used for stack and globals is at least 32 bytes.
  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
  RedzoneSize = std::max(32, (int)(1 << MappingScale));

  if (ClMappingOffsetLog >= 0) {
    // Tell the run-time the current values of mapping offset and scale.
    GlobalValue *asan_mapping_offset =
        new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                       ConstantInt::get(IntptrTy, MappingOffset),
                       kAsanMappingOffsetName);
    // Read the global, otherwise it may be optimized away.
    IRB.CreateLoad(asan_mapping_offset, true);
  }
  if (ClMappingScale) {
    GlobalValue *asan_mapping_scale =
        new GlobalVariable(M, IntptrTy, true, GlobalValue::LinkOnceODRLinkage,
                           ConstantInt::get(IntptrTy, MappingScale),
                           kAsanMappingScaleName);
    // Read the global, otherwise it may be optimized away.
    IRB.CreateLoad(asan_mapping_scale, true);
  }

  // Register the synthesized ctor so __asan_init runs at program startup.
  appendToGlobalCtors(M, AsanCtorFunction, kAsanCtorAndCtorPriority);

  return true;
}
| 870 |
| 871 bool AddressSanitizer::doFinalization(Module &M) { |
| 872 // We transform the globals at the very end so that the optimization analysis |
| 873 // works on the original globals. |
| 874 if (ClGlobals) |
| 875 return insertGlobalRedzones(M); |
| 876 return false; |
| 877 } |
| 878 |
921 | 879 |
922 bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) { | 880 bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) { |
923 // For each NSObject descendant having a +load method, this method is invoked | 881 // For each NSObject descendant having a +load method, this method is invoked |
924 // by the ObjC runtime before any of the static constructors is called. | 882 // by the ObjC runtime before any of the static constructors is called. |
925 // Therefore we need to instrument such methods with a call to __asan_init | 883 // Therefore we need to instrument such methods with a call to __asan_init |
926 // at the beginning in order to initialize our runtime before any access to | 884 // at the beginning in order to initialize our runtime before any access to |
927 // the shadow memory. | 885 // the shadow memory. |
928 // We cannot just ignore these methods, because they may call other | 886 // We cannot just ignore these methods, because they may call other |
929 // instrumented functions. | 887 // instrumented functions. |
930 if (F.getName().find(" load]") != std::string::npos) { | 888 if (F.getName().find(" load]") != std::string::npos) { |
931 IRBuilder<> IRB(F.begin()->begin()); | 889 IRBuilder<> IRB(F.begin()->begin()); |
932 IRB.CreateCall(AsanInitFunction); | 890 IRB.CreateCall(AsanInitFunction); |
933 return true; | 891 return true; |
934 } | 892 } |
935 return false; | 893 return false; |
936 } | 894 } |
937 | 895 |
938 bool AddressSanitizer::handleFunction(Module &M, Function &F) { | 896 bool AddressSanitizer::runOnFunction(Function &F) { |
939 if (BL->isIn(F)) return false; | 897 if (BL->isIn(F)) return false; |
940 if (&F == AsanCtorFunction) return false; | 898 if (&F == AsanCtorFunction) return false; |
| 899 DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n"); |
941 | 900 |
942 // If needed, insert __asan_init before checking for AddressSafety attr. | 901 // If needed, insert __asan_init before checking for AddressSafety attr. |
943 maybeInsertAsanInitAtFunctionEntry(F); | 902 maybeInsertAsanInitAtFunctionEntry(F); |
944 | 903 |
945 if (!F.getFnAttributes().hasAttribute(Attributes::AddressSafety)) | 904 if (!F.getFnAttributes().hasAttribute(Attributes::AddressSafety)) |
946 return false; | 905 return false; |
947 | 906 |
948 if (!ClDebugFunc.empty() && ClDebugFunc != F.getName()) | 907 if (!ClDebugFunc.empty() && ClDebugFunc != F.getName()) |
949 return false; | 908 return false; |
| 909 |
| 910 SE = &getAnalysis<ScalarEvolution>(); |
950 | 911 |
951 // We want to instrument every address only once per basic block (unless there | 912 // We want to instrument every address only once per basic block (unless there |
952 // are calls between uses). | 913 // are calls between uses). |
953 SmallSet<Value*, 16> TempsToInstrument; | 914 SmallSet<Value*, 16> TempsToInstrument; |
954 SmallVector<Instruction*, 16> ToInstrument; | 915 SmallVector<Instruction*, 16> ToInstrument; |
955 SmallVector<Instruction*, 8> NoReturnCalls; | 916 SmallVector<Instruction*, 8> NoReturnCalls; |
956 bool IsWrite; | 917 bool IsWrite; |
957 | 918 |
958 // Fill the set of memory operations to instrument. | 919 // Fill the set of memory operations to instrument. |
959 for (Function::iterator FI = F.begin(), FE = F.end(); | 920 for (Function::iterator FI = F.begin(), FE = F.end(); |
(...skipping 20 matching lines...) Expand all Loading... |
980 } | 941 } |
981 continue; | 942 continue; |
982 } | 943 } |
983 ToInstrument.push_back(BI); | 944 ToInstrument.push_back(BI); |
984 NumInsnsPerBB++; | 945 NumInsnsPerBB++; |
985 if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) | 946 if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) |
986 break; | 947 break; |
987 } | 948 } |
988 } | 949 } |
989 | 950 |
990 AsanFunctionContext AFC(F); | |
991 | |
992 // Instrument. | 951 // Instrument. |
993 int NumInstrumented = 0; | 952 int NumInstrumented = 0; |
994 for (size_t i = 0, n = ToInstrument.size(); i != n; i++) { | 953 for (size_t i = 0, n = ToInstrument.size(); i != n; i++) { |
995 Instruction *Inst = ToInstrument[i]; | 954 Instruction *Inst = ToInstrument[i]; |
996 if (ClDebugMin < 0 || ClDebugMax < 0 || | 955 if (ClDebugMin < 0 || ClDebugMax < 0 || |
997 (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) { | 956 (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) { |
998 if (isInterestingMemoryAccess(Inst, &IsWrite)) | 957 if (isInterestingMemoryAccess(Inst, &IsWrite)) |
999 instrumentMop(AFC, Inst); | 958 instrumentMop(Inst); |
1000 else | 959 else |
1001 instrumentMemIntrinsic(AFC, cast<MemIntrinsic>(Inst)); | 960 instrumentMemIntrinsic(cast<MemIntrinsic>(Inst)); |
1002 } | 961 } |
1003 NumInstrumented++; | 962 NumInstrumented++; |
1004 } | 963 } |
1005 | 964 |
1006 DEBUG(dbgs() << F); | 965 bool ChangedStack = poisonStackInFunction(F); |
1007 | |
1008 bool ChangedStack = poisonStackInFunction(M, F); | |
1009 | 966 |
1010 // We must unpoison the stack before every NoReturn call (throw, _exit, etc). | 967 // We must unpoison the stack before every NoReturn call (throw, _exit, etc). |
1011 // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37 | 968 // See e.g. http://code.google.com/p/address-sanitizer/issues/detail?id=37 |
1012 for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) { | 969 for (size_t i = 0, n = NoReturnCalls.size(); i != n; i++) { |
1013 Instruction *CI = NoReturnCalls[i]; | 970 Instruction *CI = NoReturnCalls[i]; |
1014 IRBuilder<> IRB(CI); | 971 IRBuilder<> IRB(CI); |
1015 IRB.CreateCall(M.getOrInsertFunction(kAsanHandleNoReturnName, | 972 IRB.CreateCall(AsanHandleNoReturnFunc); |
1016 IRB.getVoidTy(), NULL)); | 973 } |
1017 } | 974 DEBUG(dbgs() << "ASAN done instrumenting:\n" << F << "\n"); |
1018 | 975 |
1019 return NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty(); | 976 return NumInstrumented > 0 || ChangedStack || !NoReturnCalls.empty(); |
1020 } | 977 } |
1021 | 978 |
1022 static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) { | 979 static uint64_t ValueForPoison(uint64_t PoisonByte, size_t ShadowRedzoneSize) { |
1023 if (ShadowRedzoneSize == 1) return PoisonByte; | 980 if (ShadowRedzoneSize == 1) return PoisonByte; |
1024 if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte; | 981 if (ShadowRedzoneSize == 2) return (PoisonByte << 8) + PoisonByte; |
1025 if (ShadowRedzoneSize == 4) | 982 if (ShadowRedzoneSize == 4) |
1026 return (PoisonByte << 24) + (PoisonByte << 16) + | 983 return (PoisonByte << 24) + (PoisonByte << 16) + |
1027 (PoisonByte << 8) + (PoisonByte); | 984 (PoisonByte << 8) + (PoisonByte); |
(...skipping 92 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... |
1120 // | 1077 // |
1121 // Stack poisoning does not play well with exception handling. | 1078 // Stack poisoning does not play well with exception handling. |
1122 // When an exception is thrown, we essentially bypass the code | 1079 // When an exception is thrown, we essentially bypass the code |
1123 // that unpoisones the stack. This is why the run-time library has | 1080 // that unpoisones the stack. This is why the run-time library has |
1124 // to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire | 1081 // to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire |
1125 // stack in the interceptor. This however does not work inside the | 1082 // stack in the interceptor. This however does not work inside the |
1126 // actual function which catches the exception. Most likely because the | 1083 // actual function which catches the exception. Most likely because the |
1127 // compiler hoists the load of the shadow value somewhere too high. | 1084 // compiler hoists the load of the shadow value somewhere too high. |
1128 // This causes asan to report a non-existing bug on 453.povray. | 1085 // This causes asan to report a non-existing bug on 453.povray. |
1129 // It sounds like an LLVM bug. | 1086 // It sounds like an LLVM bug. |
1130 bool AddressSanitizer::poisonStackInFunction(Module &M, Function &F) { | 1087 bool AddressSanitizer::poisonStackInFunction(Function &F) { |
1131 if (!ClStack) return false; | 1088 if (!ClStack) return false; |
1132 SmallVector<AllocaInst*, 16> AllocaVec; | 1089 SmallVector<AllocaInst*, 16> AllocaVec; |
1133 SmallVector<Instruction*, 8> RetVec; | 1090 SmallVector<Instruction*, 8> RetVec; |
1134 uint64_t TotalSize = 0; | 1091 uint64_t TotalSize = 0; |
1135 | 1092 |
1136 // Filter out Alloca instructions we want (and can) handle. | 1093 // Filter out Alloca instructions we want (and can) handle. |
1137 // Collect Ret instructions. | 1094 // Collect Ret instructions. |
1138 for (Function::iterator FI = F.begin(), FE = F.end(); | 1095 for (Function::iterator FI = F.begin(), FE = F.end(); |
1139 FI != FE; ++FI) { | 1096 FI != FE; ++FI) { |
1140 BasicBlock &BB = *FI; | 1097 BasicBlock &BB = *FI; |
(...skipping 29 matching lines...) Expand all Loading... |
1170 | 1127 |
1171 Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize); | 1128 Type *ByteArrayTy = ArrayType::get(IRB.getInt8Ty(), LocalStackSize); |
1172 AllocaInst *MyAlloca = | 1129 AllocaInst *MyAlloca = |
1173 new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore); | 1130 new AllocaInst(ByteArrayTy, "MyAlloca", InsBefore); |
1174 MyAlloca->setAlignment(RedzoneSize); | 1131 MyAlloca->setAlignment(RedzoneSize); |
1175 assert(MyAlloca->isStaticAlloca()); | 1132 assert(MyAlloca->isStaticAlloca()); |
1176 Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy); | 1133 Value *OrigStackBase = IRB.CreatePointerCast(MyAlloca, IntptrTy); |
1177 Value *LocalStackBase = OrigStackBase; | 1134 Value *LocalStackBase = OrigStackBase; |
1178 | 1135 |
1179 if (DoStackMalloc) { | 1136 if (DoStackMalloc) { |
1180 Value *AsanStackMallocFunc = M.getOrInsertFunction( | |
1181 kAsanStackMallocName, IntptrTy, IntptrTy, IntptrTy, NULL); | |
1182 LocalStackBase = IRB.CreateCall2(AsanStackMallocFunc, | 1137 LocalStackBase = IRB.CreateCall2(AsanStackMallocFunc, |
1183 ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase); | 1138 ConstantInt::get(IntptrTy, LocalStackSize), OrigStackBase); |
1184 } | 1139 } |
1185 | 1140 |
1186 // This string will be parsed by the run-time (DescribeStackAddress). | 1141 // This string will be parsed by the run-time (DescribeStackAddress). |
1187 SmallString<2048> StackDescriptionStorage; | 1142 SmallString<2048> StackDescriptionStorage; |
1188 raw_svector_ostream StackDescription(StackDescriptionStorage); | 1143 raw_svector_ostream StackDescription(StackDescriptionStorage); |
1189 StackDescription << F.getName() << " " << AllocaVec.size() << " "; | 1144 StackDescription << F.getName() << " " << AllocaVec.size() << " "; |
1190 | 1145 |
1191 uint64_t Pos = RedzoneSize; | 1146 uint64_t Pos = RedzoneSize; |
(...skipping 15 matching lines...) Expand all Loading... |
1207 assert(Pos == LocalStackSize); | 1162 assert(Pos == LocalStackSize); |
1208 | 1163 |
1209 // Write the Magic value and the frame description constant to the redzone. | 1164 // Write the Magic value and the frame description constant to the redzone. |
1210 Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy); | 1165 Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy); |
1211 IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic), | 1166 IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic), |
1212 BasePlus0); | 1167 BasePlus0); |
1213 Value *BasePlus1 = IRB.CreateAdd(LocalStackBase, | 1168 Value *BasePlus1 = IRB.CreateAdd(LocalStackBase, |
1214 ConstantInt::get(IntptrTy, LongSize/8)); | 1169 ConstantInt::get(IntptrTy, LongSize/8)); |
1215 BasePlus1 = IRB.CreateIntToPtr(BasePlus1, IntptrPtrTy); | 1170 BasePlus1 = IRB.CreateIntToPtr(BasePlus1, IntptrPtrTy); |
1216 Value *Description = IRB.CreatePointerCast( | 1171 Value *Description = IRB.CreatePointerCast( |
1217 createPrivateGlobalForString(M, StackDescription.str()), | 1172 createPrivateGlobalForString(*F.getParent(), StackDescription.str()), |
1218 IntptrTy); | 1173 IntptrTy); |
1219 IRB.CreateStore(Description, BasePlus1); | 1174 IRB.CreateStore(Description, BasePlus1); |
1220 | 1175 |
1221 // Poison the stack redzones at the entry. | 1176 // Poison the stack redzones at the entry. |
1222 Value *ShadowBase = memToShadow(LocalStackBase, IRB); | 1177 Value *ShadowBase = memToShadow(LocalStackBase, IRB); |
1223 PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRB, ShadowBase, true); | 1178 PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRB, ShadowBase, true); |
1224 | |
1225 Value *AsanStackFreeFunc = NULL; | |
1226 if (DoStackMalloc) { | |
1227 AsanStackFreeFunc = M.getOrInsertFunction( | |
1228 kAsanStackFreeName, IRB.getVoidTy(), | |
1229 IntptrTy, IntptrTy, IntptrTy, NULL); | |
1230 } | |
1231 | 1179 |
1232 // Unpoison the stack before all ret instructions. | 1180 // Unpoison the stack before all ret instructions. |
1233 for (size_t i = 0, n = RetVec.size(); i < n; i++) { | 1181 for (size_t i = 0, n = RetVec.size(); i < n; i++) { |
1234 Instruction *Ret = RetVec[i]; | 1182 Instruction *Ret = RetVec[i]; |
1235 IRBuilder<> IRBRet(Ret); | 1183 IRBuilder<> IRBRet(Ret); |
1236 | 1184 |
1237 // Mark the current frame as retired. | 1185 // Mark the current frame as retired. |
1238 IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic), | 1186 IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic), |
1239 BasePlus0); | 1187 BasePlus0); |
1240 // Unpoison the stack. | 1188 // Unpoison the stack. |
1241 PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRBRet, ShadowBase, false); | 1189 PoisonStack(ArrayRef<AllocaInst*>(AllocaVec), IRBRet, ShadowBase, false); |
1242 | 1190 |
1243 if (DoStackMalloc) { | 1191 if (DoStackMalloc) { |
1244 IRBRet.CreateCall3(AsanStackFreeFunc, LocalStackBase, | 1192 IRBRet.CreateCall3(AsanStackFreeFunc, LocalStackBase, |
1245 ConstantInt::get(IntptrTy, LocalStackSize), | 1193 ConstantInt::get(IntptrTy, LocalStackSize), |
1246 OrigStackBase); | 1194 OrigStackBase); |
1247 } | 1195 } |
1248 } | 1196 } |
1249 | 1197 |
| 1198 // We are done. Remove the old unused alloca instructions. |
| 1199 for (size_t i = 0, n = AllocaVec.size(); i < n; i++) |
| 1200 AllocaVec[i]->eraseFromParent(); |
| 1201 |
1250 if (ClDebugStack) { | 1202 if (ClDebugStack) { |
1251 DEBUG(dbgs() << F); | 1203 DEBUG(dbgs() << F); |
1252 } | 1204 } |
1253 | 1205 |
1254 return true; | 1206 return true; |
1255 } | 1207 } |
LEFT | RIGHT |