/external/rust/crates/grpcio-sys/grpc/src/core/lib/iomgr/ |
D | work_serializer.cc |
     58  const size_t prev_size = size_.FetchAdd(1);  in Run() local
     60  GPR_DEBUG_ASSERT(prev_size > 0);  in Run()
     61  if (prev_size == 1) {  in Run()
     86  size_t prev_size = size_.FetchSub(1);  in Orphan() local
     87  if (prev_size == 1) {  in Orphan()
    104  size_t prev_size = size_.FetchSub(1);  in DrainQueue() local
    105  GPR_DEBUG_ASSERT(prev_size >= 1);  in DrainQueue()
    108  if (prev_size == 1) {  in DrainQueue()
    115  if (prev_size == 2) {  in DrainQueue()
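Note: the work_serializer.cc hits all revolve around one idiom: an atomic size counter whose pre-update value decides who owns the callback queue, so the thread that bumps it from idle drains the queue and the thread whose decrement empties it releases ownership. Below is a minimal C++ sketch of that idiom only; the class and helpers are invented for illustration and leave out gRPC's real WorkSerializer details (memory ordering, orphaning, thread hand-off).

    #include <atomic>
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <mutex>

    // Sketch of the counter idiom: the thread whose increment finds the counter
    // idle becomes the drainer; the drainer keeps popping until its own decrement
    // shows it just consumed the last queued callback.
    class TinySerializer {
     public:
      void Run(std::function<void()> cb) {
        {
          std::lock_guard<std::mutex> lock(mu_);
          queue_.push_back(std::move(cb));
        }
        const size_t prev_size = size_.fetch_add(1);
        if (prev_size == 0)
          DrainQueue();                  // counter was idle: we own the queue
      }

     private:
      void DrainQueue() {
        for (;;) {
          std::function<void()> cb;
          {
            std::lock_guard<std::mutex> lock(mu_);
            cb = std::move(queue_.front());
            queue_.pop_front();
          }
          cb();
          const size_t prev_size = size_.fetch_sub(1);
          if (prev_size == 1)
            return;                      // that was the last one: release ownership
        }
      }

      std::mutex mu_;
      std::deque<std::function<void()>> queue_;
      std::atomic<size_t> size_{0};
    };

    int main() {
      TinySerializer s;
      s.Run([] { std::puts("callback ran"); });
    }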
|
D | call_combiner.cc |
    122  size_t prev_size =  in Start() local
    125  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,  in Start()
    126  prev_size + 1);  in Start()
    129  if (prev_size == 0) {  in Start()
    154  size_t prev_size =  in Stop() local
    157  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,  in Stop()
    158  prev_size - 1);  in Stop()
    160  GPR_ASSERT(prev_size >= 1);  in Stop()
    161  if (prev_size > 1) {  in Stop()
|
/external/grpc-grpc/src/core/lib/iomgr/ |
D | call_combiner.cc |
     73  size_t prev_size = static_cast<size_t>(  in grpc_call_combiner_start() local
     76  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,  in grpc_call_combiner_start()
     77  prev_size + 1);  in grpc_call_combiner_start()
     80  if (prev_size == 0) {  in grpc_call_combiner_start()
    108  size_t prev_size = static_cast<size_t>(  in grpc_call_combiner_stop() local
    111  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,  in grpc_call_combiner_stop()
    112  prev_size - 1);  in grpc_call_combiner_stop()
    114  GPR_ASSERT(prev_size >= 1);  in grpc_call_combiner_stop()
    115  if (prev_size > 1) {  in grpc_call_combiner_stop()
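Note: this entry and the call_combiner.cc entry above are two vendored copies of the same gRPC file. Unlike the work serializer, the combiner hands off one closure at a time: start bumps the pending count and runs the closure immediately only when the previous count was zero, while stop decrements and, if others are queued behind it, launches exactly one of them. A hedged sketch of that hand-off, with a mutex-protected deque standing in for gRPC's lock-free queue; TinyCombiner and its members are invented names, not the gRPC API.

    #include <atomic>
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <mutex>

    // Start() runs the closure immediately only when nothing was pending;
    // Stop() hands off to exactly one queued closure when more work remains.
    class TinyCombiner {
     public:
      void Start(std::function<void()> closure) {
        const size_t prev_size = size_.fetch_add(1);
        std::printf(" size: %zu -> %zu\n", prev_size, prev_size + 1);
        if (prev_size == 0) {
          closure();                               // combiner was idle: run now
        } else {
          std::lock_guard<std::mutex> lock(mu_);
          queue_.push_back(std::move(closure));    // someone is already running
        }
      }

      void Stop() {
        const size_t prev_size = size_.fetch_sub(1);  // must pair with a Start()
        std::printf(" size: %zu -> %zu\n", prev_size, prev_size - 1);
        if (prev_size > 1) {                       // more closures behind us
          std::function<void()> next;
          for (;;) {                               // a producer may still be mid-push;
            std::lock_guard<std::mutex> lock(mu_); // wait until its item is visible
            if (!queue_.empty()) {
              next = std::move(queue_.front());
              queue_.pop_front();
              break;
            }
          }
          next();
        }
      }

     private:
      std::mutex mu_;
      std::deque<std::function<void()>> queue_;
      std::atomic<size_t> size_{0};
    };

    int main() {
      TinyCombiner combiner;
      combiner.Start([] { std::puts("first closure"); });
      combiner.Start([] { std::puts("second closure"); });
      combiner.Stop();   // ends the first closure's turn and runs the second
      combiner.Stop();
    }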
|
/external/pcre/dist2/src/sljit/ |
D | sljitExecAllocator.c |
    202  sljit_uw prev_size;  member
    269  header->prev_size = chunk_size;  in sljit_malloc_exec()
    270  AS_BLOCK_HEADER(header, size)->prev_size = size;  in sljit_malloc_exec()
    295  header->prev_size = 0;  in sljit_malloc_exec()
    303  free_block->header.prev_size = size;  in sljit_malloc_exec()
    314  next_header->prev_size = chunk_size;  in sljit_malloc_exec()
    332  free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);  in sljit_free_exec()
    336  header->prev_size = free_block->size;  in sljit_free_exec()
    348  header->prev_size = free_block->size;  in sljit_free_exec()
    352  if (SLJIT_UNLIKELY(!free_block->header.prev_size && header->size == 1)) {  in sljit_free_exec()
    [all …]
|
D | sljitProtExecAllocator.c |
    268  sljit_uw prev_size;  member
    338  header->prev_size = chunk_size;  in sljit_malloc_exec()
    340  AS_BLOCK_HEADER(header, size)->prev_size = size;  in sljit_malloc_exec()
    371  header->prev_size = 0;  in sljit_malloc_exec()
    380  free_block->header.prev_size = size;  in sljit_malloc_exec()
    392  next_header->prev_size = chunk_size;  in sljit_malloc_exec()
    412  free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);  in sljit_free_exec()
    416  header->prev_size = free_block->size;  in sljit_free_exec()
    428  header->prev_size = free_block->size;  in sljit_free_exec()
    432  if (SLJIT_UNLIKELY(!free_block->header.prev_size && header->size == 1)) {  in sljit_free_exec()
    [all …]
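Note: both sljit allocators keep a prev_size field in every block header so that sljit_free_exec() can step backwards by exactly that many bytes, reach the preceding block, and coalesce adjacent free blocks. A minimal boundary-tag sketch of that back-pointer arithmetic, laid out in a static arena instead of mmap'd executable chunks; the struct and function names are invented and this is not the sljit implementation.

    #include <cassert>
    #include <cstddef>
    #include <cstdio>

    // Every block starts with a header recording its own size and the size of the
    // block physically before it; prev_size == 0 marks the first block of a chunk.
    struct BlockHeader {
      size_t size;
      size_t prev_size;
      bool free;
    };

    static BlockHeader* prev_block(BlockHeader* h) {
      return reinterpret_cast<BlockHeader*>(reinterpret_cast<char*>(h) - h->prev_size);
    }

    static BlockHeader* next_block(BlockHeader* h) {
      return reinterpret_cast<BlockHeader*>(reinterpret_cast<char*>(h) + h->size);
    }

    // Free a block, merge it into a free predecessor if there is one, and keep
    // the back-pointer of the following block consistent with the merged size.
    static BlockHeader* free_and_coalesce(BlockHeader* h) {
      h->free = true;
      if (h->prev_size != 0 && prev_block(h)->free) {
        BlockHeader* prev = prev_block(h);
        prev->size += h->size;
        h = prev;
      }
      next_block(h)->prev_size = h->size;
      return h;
    }

    int main() {
      alignas(BlockHeader) static char arena[256];
      BlockHeader* a = reinterpret_cast<BlockHeader*>(arena);
      *a = {96, 0, false};
      BlockHeader* b = next_block(a);
      *b = {96, 96, false};
      BlockHeader* end = next_block(b);    // sentinel header closing the arena
      *end = {0, 96, false};

      free_and_coalesce(a);
      BlockHeader* merged = free_and_coalesce(b);
      std::printf("merged block size: %zu\n", merged->size);   // 192
      assert(end->prev_size == 192);
    }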
|
/external/grpc-grpc/third_party/nanopb/ |
D | pb_common.c |
     42  size_t prev_size = prev_field->data_size;  in pb_field_iter_next() local
     48  prev_size = 0;  in pb_field_iter_next()
     56  prev_size *= prev_field->array_size;  in pb_field_iter_next()
     62  prev_size = sizeof(void*);  in pb_field_iter_next()
     72  iter->pData = (char*)iter->pData + prev_size + iter->pos->data_offset;  in pb_field_iter_next()
|
/external/nanopb-c/ |
D | pb_common.c |
     42  size_t prev_size = prev_field->data_size;  in pb_field_iter_next() local
     56  prev_size *= prev_field->array_size;  in pb_field_iter_next()
     62  prev_size = sizeof(void*);  in pb_field_iter_next()
     72  iter->pData = (char*)iter->pData + prev_size + iter->pos->data_offset;  in pb_field_iter_next()
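Note: the two pb_common.c entries are the same nanopb source vendored twice. pb_field_iter_next() advances iter->pData by the previous field's storage size (scaled by array_size for repeated fields, or collapsed to sizeof(void*) for pointer fields) plus the next field's data_offset, which is stored relative to the end of the previous field. A small sketch of that relative-offset walk over an ordinary struct; the Msg/FieldDesc names and the descriptor table are made up for illustration and are not nanopb's generated tables.

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    struct Msg {
      int32_t id;
      int32_t values[4];   // stands in for a repeated field with 4 elements
      double weight;
    };

    // In the spirit of nanopb's pb_field_t: data_offset is the gap between the
    // end of the previous field's storage and the start of this one, not an
    // absolute struct offset, so the iterator can walk with running arithmetic.
    struct FieldDesc {
      size_t data_offset;
      size_t data_size;    // size of one element
      size_t array_size;   // element count (1 for singular fields)
    };

    static const FieldDesc kFields[] = {
        {offsetof(Msg, id), sizeof(int32_t), 1},
        {offsetof(Msg, values) - (offsetof(Msg, id) + sizeof(int32_t)), sizeof(int32_t), 4},
        {offsetof(Msg, weight) - (offsetof(Msg, values) + sizeof(int32_t) * 4), sizeof(double), 1},
    };

    int main() {
      Msg m{7, {1, 2, 3, 4}, 2.5};
      char* pData = reinterpret_cast<char*>(&m);
      size_t prev_size = 0;
      for (const FieldDesc& f : kFields) {
        pData += prev_size + f.data_offset;        // same step as the hit at line 72
        std::printf("field starts at struct offset %td\n",
                    pData - reinterpret_cast<char*>(&m));
        prev_size = f.data_size * f.array_size;    // cf. the hits at lines 42 and 56
      }
    }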
|
/external/jemalloc_new/test/unit/ |
D | bitmap.c |
    141  size_t prev_size) {  in test_bitmap_size_body() argument
    145  assert_zu_ge(size, prev_size, "Bitmap size is smaller than expected");  in test_bitmap_size_body()
    150  size_t nbits, prev_size;  in TEST_BEGIN() local
    152  prev_size = 0;  in TEST_BEGIN()
    156  prev_size = test_bitmap_size_body(&binfo, nbits, prev_size);  in TEST_BEGIN()
    160  prev_size = test_bitmap_size_body(&binfo, nbits, \  in TEST_BEGIN()
    161  prev_size); \  in TEST_BEGIN()
    163  prev_size = 0;  in TEST_BEGIN()
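Note: the jemalloc test threads prev_size through successive calls so it can assert that the computed bitmap metadata size never shrinks as nbits grows. A toy version of that monotonicity check, using a naive one-word-per-64-bits size formula instead of jemalloc's binfo machinery:

    #include <cassert>
    #include <cstddef>
    #include <cstdio>

    // Naive stand-in for the bitmap metadata size: 64-bit words needed, in bytes.
    static size_t bitmap_size(size_t nbits) {
      return ((nbits + 63) / 64) * sizeof(unsigned long long);
    }

    // Mirrors the test's shape: take the previous size, check we never shrank,
    // and hand the new size back so the caller can thread it through the loop.
    static size_t test_bitmap_size_body(size_t nbits, size_t prev_size) {
      size_t size = bitmap_size(nbits);
      assert(size >= prev_size && "Bitmap size is smaller than expected");
      return size;
    }

    int main() {
      size_t prev_size = 0;
      for (size_t nbits = 1; nbits <= 4096; nbits++)
        prev_size = test_bitmap_size_body(nbits, prev_size);
      std::printf("final size for 4096 bits: %zu bytes\n", prev_size);
    }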
|
/external/mesa3d/src/amd/compiler/ |
D | aco_print_asm.cpp |
    190  unsigned prev_size = 0;  in print_asm() local
    195  if (pos + prev_size <= exec_size && prev_pos != pos && !new_block &&  in print_asm()
    196  memcmp(&binary[prev_pos], &binary[pos], prev_size * 4) == 0) {  in print_asm()
    198  pos += prev_size;  in print_asm()
    223  prev_size = res.second;  in print_asm()
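Note: in aco_print_asm.cpp, prev_size remembers how many dwords the previous instruction occupied, so the printer can memcmp the bytes at the current position against the previous instruction and fold identical repeats into one line. A stripped-down sketch over a plain dword buffer; the data and per-instruction sizes are invented, and the real function's disassembler calls and block handling are omitted.

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    int main() {
      // Pretend each decoded "instruction" is 1 or 2 dwords; identical neighbours
      // should be folded into a "(repeated N times)" line instead of reprinted.
      std::vector<uint32_t> binary = {0xBF800000u, 0xBF800000u, 0xBF800000u,
                                      0x7E000280u, 0x00000001u};
      std::vector<unsigned> sizes = {1, 1, 1, 2};   // dwords per instruction

      unsigned pos = 0, prev_pos = 0, prev_size = 0, repeats = 0;
      for (unsigned i = 0; i < sizes.size();) {
        if (prev_size != 0 && pos + prev_size <= binary.size() &&
            std::memcmp(&binary[prev_pos], &binary[pos], prev_size * 4) == 0) {
          repeats++;                  // byte-identical to the previous instruction
          pos += prev_size;
          i++;
          continue;
        }
        if (repeats != 0) {
          std::printf("  (repeated %u times)\n", repeats);
          repeats = 0;
        }
        std::printf("instr at dword %u (%u dwords)\n", pos, sizes[i]);
        prev_pos = pos;
        prev_size = sizes[i];
        pos += sizes[i];
        i++;
      }
      if (repeats != 0)
        std::printf("  (repeated %u times)\n", repeats);
    }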
|
/external/llvm-project/lldb/source/Breakpoint/ |
D | BreakpointLocationCollection.cpp |
    124  size_t prev_size = GetSize();  in ShouldStop() local
    125  while (i < prev_size) {  in ShouldStop()
    130  if (prev_size == GetSize())  in ShouldStop()
    132  prev_size = GetSize();  in ShouldStop()
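Note: BreakpointLocationCollection::ShouldStop caches GetSize() in prev_size and re-reads it after every callback, because a breakpoint callback can add or remove locations mid-iteration; the index only advances when the size is unchanged. A small sketch of that defensive loop, with the per-location callback simulated inline (positive ids vote to stop, negative ids ask to be removed); the data shape is invented, not lldb's.

    #include <cstdio>
    #include <vector>

    // The "callback" for each location may mutate the collection, so the loop
    // re-checks the size after every call instead of trusting a cached bound.
    static bool ShouldStop(std::vector<int>& locations) {
      bool stop = false;
      size_t i = 0;
      size_t prev_size = locations.size();
      while (i < prev_size) {
        if (locations[i] > 0)
          stop = true;                              // this location votes to stop
        else
          locations.erase(locations.begin() + i);   // callback removed a location
        if (prev_size == locations.size())
          i++;                                      // nothing changed: advance
        prev_size = locations.size();               // resync with the collection
      }
      return stop;
    }

    int main() {
      std::vector<int> locations = {1, -2, 3};
      std::printf("should stop: %d\n", ShouldStop(locations) ? 1 : 0);
    }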
|
/external/tensorflow/tensorflow/core/kernels/boosted_trees/quantiles/ |
D | weighted_quantiles_summary_test.cc |
    132  int prev_size = 1;  in TEST_F() local
    157  size += prev_size;  in TEST_F()
    158  prev_size = last_size;  in TEST_F()
|
/external/llvm-project/lldb/source/Symbol/ |
D | Symtab.cpp |
    446  uint32_t prev_size = indexes.size();  in AppendSymbolIndexesWithType() local
    455  return indexes.size() - prev_size;  in AppendSymbolIndexesWithType()
    464  uint32_t prev_size = indexes.size();  in AppendSymbolIndexesWithTypeAndFlagsValue() local
    475  return indexes.size() - prev_size;  in AppendSymbolIndexesWithTypeAndFlagsValue()
    486  uint32_t prev_size = indexes.size();  in AppendSymbolIndexesWithType() local
    498  return indexes.size() - prev_size;  in AppendSymbolIndexesWithType()
    678  uint32_t prev_size = indexes.size();  in AppendSymbolIndexesMatchingRegExAndType() local
    691  return indexes.size() - prev_size;  in AppendSymbolIndexesMatchingRegExAndType()
    700  uint32_t prev_size = indexes.size();  in AppendSymbolIndexesMatchingRegExAndType() local
    716  return indexes.size() - prev_size;  in AppendSymbolIndexesMatchingRegExAndType()
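Note: every Symtab::AppendSymbolIndexes* hit snapshots indexes.size() before appending and returns the difference, i.e. how many indexes that particular call added; the SymbolFileDWARF.cpp entry further down uses the same idiom with sc_list.GetSize(). A generic sketch of the idiom (the filter itself is invented):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Snapshot the output size first, append matches, and report how many were
    // added as the difference; callers never have to track the count themselves.
    static uint32_t AppendEvenValueIndexes(const std::vector<int>& symbols,
                                           std::vector<uint32_t>& indexes) {
      const uint32_t prev_size = static_cast<uint32_t>(indexes.size());
      for (uint32_t i = 0; i < symbols.size(); i++)
        if (symbols[i] % 2 == 0)
          indexes.push_back(i);
      return static_cast<uint32_t>(indexes.size()) - prev_size;
    }

    int main() {
      std::vector<int> symbols = {3, 4, 7, 8, 10};
      std::vector<uint32_t> indexes;
      std::printf("appended %u matches\n", AppendEvenValueIndexes(symbols, indexes));
    }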
|
/external/perfetto/src/kallsyms/ |
D | kernel_symbol_map.cc |
    197  const size_t prev_size = buf_.size();  in Add() local
    198  buf_.resize(prev_size + token_size);  in Add()
    199  char* tok_wptr = &buf_[prev_size];  in Add()
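Note: the perfetto hit grows a flat byte buffer with resize() and then writes the new token starting at the old end, which prev_size remembers. A sketch of that append-at-old-end pattern with std::vector<char> and memcpy; the token-interning details of the real code are omitted and AddToken is an invented name.

    #include <cstdio>
    #include <cstring>
    #include <string>
    #include <vector>

    // Grow the flat buffer first, then write the new token at the old end; the
    // saved prev_size doubles as the token's offset in the buffer.
    static size_t AddToken(std::vector<char>& buf, const std::string& token) {
      const size_t prev_size = buf.size();
      buf.resize(prev_size + token.size());
      char* tok_wptr = &buf[prev_size];
      std::memcpy(tok_wptr, token.data(), token.size());
      return prev_size;
    }

    int main() {
      std::vector<char> buf;
      size_t off_a = AddToken(buf, "vfs_read");
      size_t off_b = AddToken(buf, "ksys_write");
      std::printf("offsets: %zu %zu, total %zu bytes\n", off_a, off_b, buf.size());
    }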
|
/external/libxml2/ |
D | error.c |
     24  int size, prev_size = -1; \
     39  if (prev_size == chars) { \
     42  prev_size = chars; \
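Note: the trailing backslashes show these libxml2 lines sit inside a macro. The pattern is a vsnprintf grow-and-retry loop in which prev_size holds the character count reported by the previous round; once two rounds report the same count the output has stabilized and the loop stops, which also keeps a misbehaving vsnprintf from spinning forever. A simplified C++ sketch of that loop, not libxml2's exact macro (format_string and the size limits are invented):

    #include <cstdarg>
    #include <cstdio>
    #include <cstdlib>

    // Format into a heap buffer, growing it until vsnprintf reports the same
    // length two rounds in a row; prev_size is the convergence/termination guard.
    static char* format_string(const char* msg, ...) {
      int size = 32;
      int prev_size = -1;
      char* str = static_cast<char*>(std::malloc(size));
      if (str == nullptr)
        return nullptr;

      while (size < 64000) {
        std::va_list ap;
        va_start(ap, msg);
        int chars = std::vsnprintf(str, size, msg, ap);
        va_end(ap);
        if (chars > -1 && chars < size) {
          if (prev_size == chars)
            break;                    // same length as last round: output is stable
          prev_size = chars;
        }
        size = (chars > -1) ? chars + 1 : size + 100;
        char* larger = static_cast<char*>(std::realloc(str, size));
        if (larger == nullptr)
          break;
        str = larger;
      }
      return str;
    }

    int main() {
      char* s = format_string("error at line %d: %s", 42, "unexpected token");
      if (s != nullptr) {
        std::puts(s);
        std::free(s);
      }
    }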
|
/external/llvm-project/llvm/include/llvm/CodeGen/GlobalISel/ |
D | LegalizerInfo.h |
    1372  int prev_size = -1;  in checkPartialSizeAndActionsVector() local
    1374  assert(SizeAndAction.first > prev_size);  in checkPartialSizeAndActionsVector()
    1375  prev_size = SizeAndAction.first;  in checkPartialSizeAndActionsVector()
|
/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/GlobalISel/ |
D | LegalizerInfo.h |
    1247  int prev_size = -1;  in checkPartialSizeAndActionsVector() local
    1249  assert(SizeAndAction.first > prev_size);  in checkPartialSizeAndActionsVector()
    1250  prev_size = SizeAndAction.first;  in checkPartialSizeAndActionsVector()
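Note: the two LegalizerInfo.h entries are the same header, once in llvm-project and once in SwiftShader's LLVM 10 snapshot. checkPartialSizeAndActionsVector walks a vector of (size, action) pairs and asserts the sizes are strictly increasing, with prev_size starting at -1 so a leading size of 0 still passes. A reduced sketch of that validation, with the action payload collapsed to an int and the function name shortened:

    #include <cassert>
    #include <cstdio>
    #include <utility>
    #include <vector>

    // The (size, action) table is only usable if sizes are strictly increasing;
    // prev_size starts below any legal size so the first entry always passes.
    static void checkSizeAndActionsVector(const std::vector<std::pair<int, int>>& v) {
      int prev_size = -1;
      for (const auto& SizeAndAction : v) {
        assert(SizeAndAction.first > prev_size && "sizes must be strictly increasing");
        prev_size = SizeAndAction.first;
      }
    }

    int main() {
      checkSizeAndActionsVector({{0, 1}, {8, 2}, {16, 3}, {32, 3}});
      std::printf("table is well-formed\n");
    }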
|
/external/jemalloc_new/src/ |
D | extent.c |
    1640  size_t prev_size;  in extent_record() local
    1642  prev_size = extent_size_get(extent);  in extent_record()
    1648  extent_size_get(extent) >= prev_size + LARGE_MINCLASS);  in extent_record()
|
/external/llvm-project/lldb/source/Plugins/SymbolFile/DWARF/ |
D | SymbolFileDWARF.cpp |
    1962  const uint32_t prev_size = sc_list.GetSize();  in ResolveSymbolContext() local
    2038  return sc_list.GetSize() - prev_size;  in ResolveSymbolContext()
|
/external/python/cffi/c/ |
D | _cffi_backend.c |
    4781  Py_ssize_t prev_size;  in _add_field() local
    4795  prev_size = PyDict_Size(interned_fields);  in _add_field()
    4802  if (PyDict_Size(interned_fields) != prev_size + 1) {  in _add_field()
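Note: CFFI's _add_field snapshots PyDict_Size(interned_fields) before inserting a field name and treats a size that did not grow by exactly one as a duplicate field. The same size-delta trick works with any map; a sketch with std::map (add_field here is an invented stand-in, not the CPython or CFFI API):

    #include <cstdio>
    #include <map>
    #include <string>

    // Insert, then compare sizes: if the map did not grow by exactly one entry,
    // the key was already present and the field is a duplicate.
    static bool add_field(std::map<std::string, int>& interned_fields,
                          const std::string& name, int offset) {
      const size_t prev_size = interned_fields.size();
      interned_fields[name] = offset;   // overwrites on duplicate, like PyDict_SetItem
      if (interned_fields.size() != prev_size + 1) {
        std::fprintf(stderr, "duplicate field name '%s'\n", name.c_str());
        return false;
      }
      return true;
    }

    int main() {
      std::map<std::string, int> fields;
      add_field(fields, "x", 0);
      add_field(fields, "y", 4);
      add_field(fields, "x", 8);   // rejected: the map size did not grow
    }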
|