/external/clang/lib/AST/ |
D | Stmt.cpp | 696 StringLiteral **clobbers, SourceLocation rparenloc) in GCCAsmStmt() argument
              712 std::copy(clobbers, clobbers + NumClobbers, Clobbers); in GCCAsmStmt()
              720 StringRef asmstr, ArrayRef<StringRef> clobbers, in MSAsmStmt() argument
              723 numinputs, clobbers.size()), LBraceLoc(lbraceloc), in MSAsmStmt()
              726 initialize(C, asmstr, asmtoks, constraints, exprs, clobbers); in MSAsmStmt()
              737 ArrayRef<StringRef> clobbers) { in initialize() argument
              739 assert(NumClobbers == clobbers.size()); in initialize()
              760 std::transform(clobbers.begin(), clobbers.end(), Clobbers, in initialize()
|
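The Stmt.cpp hits above are the AST side of GCC-style inline assembly: GCCAsmStmt and MSAsmStmt copy the clobber strings into an array owned by the statement node. As a rough sketch (illustrative, not code from the tree; it assumes an x86 target for the mnemonic), this is the kind of source-level clobber list those arrays record:

    // Illustrative extended-asm statement; clang records each entry of the
    // final colon section ("cc", "memory") as a clobber on the GCCAsmStmt
    // it builds for this statement.
    static int add_and_note_clobbers(int a, int b) {
      int result;
      asm volatile("addl %2, %0"
                   : "=r"(result)      // output operand
                   : "0"(a), "r"(b)    // input operands
                   : "cc", "memory");  // clobber list
      return result;
    }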
/external/llvm/test/CodeGen/PowerPC/ |
D | crsave.ll | 67 tail call void asm sideeffect "# clobbers", "~{cr2}"()
               77 tail call void asm sideeffect "# clobbers", "~{cr2},~{cr3},~{cr4}"()
|
D | asm-constraints.ll | 48 ; : /* No clobbers */);
|
/external/clang/test/PCH/ |
D | asm.c | 13 void call_clobbers(void) { clobbers(); } in call_clobbers()
|
D | asm.h | 10 void clobbers() { in clobbers() function
|
/external/llvm/test/CodeGen/SystemZ/ |
D | asm-17.ll | 63 ; Test clobbers of GPRs and CC.
               74 ; Test clobbers of FPRs and CC.
|
/external/llvm/test/CodeGen/X86/ |
D | base-pointer-and-cmpxchg.ll | 8 ; The inline asm clobbers a bunch of registers to make sure
                                 13 ; to clobbers rbx to set the arguments of the instruction and when
|
D | 2011-06-14-PreschedRegalias.ll | 4 ; mul wants an operand in AL, but call clobbers it.
|
D | inline-asm-sp-clobber-memcpy.ll | 8 ; Don't clobber %esi if we have inline asm that clobbers %esp.
|
D | 2012-01-16-mfence-nosse-flags.ll | 14 ; clobbers EFLAGS.
|
D | 2010-09-01-RemoveCopyByCommutingDef.ll | 13 ; The imull clobbers a 32-bit register.
|
D | 2010-06-09-FastAllocRegisters.ll | 4 ; The inline asm in this function clobbers almost all allocatable registers.
|
D | stack-align-memcpy.ll | 9 ; We can't use rep;movsl here because it clobbers the base pointer in %esi.
|
/external/swiftshader/third_party/LLVM/test/CodeGen/X86/ |
D | 2011-06-14-PreschedRegalias.ll | 4 ; mul wants an operand in AL, but call clobbers it.
|
D | 2010-09-01-RemoveCopyByCommutingDef.ll | 13 ; The imull clobbers a 32-bit register.
|
D | 2010-06-09-FastAllocRegisters.ll | 4 ; The inline asm in this function clobbers almost all allocatable registers.
|
/external/llvm/test/CodeGen/ARM/ |
D | cse-call.ll | 5 ; Don't CSE a cmp across a call that clobbers CPSR.
|
/external/clang/test/Sema/ |
D | asm.c | 22 void clobbers() { in clobbers() function
|
/external/swiftshader/third_party/subzero/tests_lit/llvm2ice_tests/ |
D | large_stack_offs.ll | 68 ; The call clobbers ip, so we need to re-create the base register.
                         123 ; The call clobbers ip, so we need to re-create the base register.
|
/external/clang/lib/Sema/ |
D | SemaStmtAsm.cpp | 145 Expr *asmString, MultiExprArg clobbers, in ActOnGCCAsmStmt() argument
                     147 unsigned NumClobbers = clobbers.size(); in ActOnGCCAsmStmt()
                     151 StringLiteral **Clobbers = reinterpret_cast<StringLiteral**>(clobbers.data()); in ActOnGCCAsmStmt()
|
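The SemaStmtAsm.cpp hits show where those clobber strings are checked: ActOnGCCAsmStmt receives them as string literals and validates each entry against the target's register names. A minimal sketch of what that check catches (illustrative only, not one of the tests listed here):

    // The clobber below is deliberately bogus; clang's semantic analysis of
    // the clobber list (reached through ActOnGCCAsmStmt) is expected to
    // reject it with an "unknown register name" style diagnostic instead of
    // letting it reach code generation.
    void bogus_clobber(int *p) {
      asm volatile("" : "=m"(*p) : : "not_a_register");
    }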
/external/llvm/test/CodeGen/SPARC/ |
D | spill.ll | 6 ;; For i32/i64 tests, use an asm statement which clobbers most
|
/external/webrtc/ |
D | DEPS | 74 # This clobbers when necessary (based on get_landmines.py). It should be
|
/external/swiftshader/third_party/LLVM/test/CodeGen/Thumb2/ |
D | 2010-03-15-AsmCCClobber.ll | 16 ; Make sure the cmp is not scheduled before the InlineAsm that clobbers cc.
|
/external/llvm/test/CodeGen/Thumb2/ |
D | 2010-03-15-AsmCCClobber.ll | 16 ; Make sure the cmp is not scheduled before the InlineAsm that clobbers cc.
|
/external/autotest/client/tests/ltp/patches/ |
D | cpuid.patch | 45 + : "edi" /* clobbers: we hit edi directly */
|