/external/rust/crates/grpcio-sys/grpc/src/core/lib/iomgr/
work_serializer.cc
     58  const size_t prev_size = size_.FetchAdd(1);    (in Run(), local)
     60  GPR_DEBUG_ASSERT(prev_size > 0);    (in Run())
     61  if (prev_size == 1) {    (in Run())
     86  size_t prev_size = size_.FetchSub(1);    (in Orphan(), local)
     87  if (prev_size == 1) {    (in Orphan())
    104  size_t prev_size = size_.FetchSub(1);    (in DrainQueue(), local)
    105  GPR_DEBUG_ASSERT(prev_size >= 1);    (in DrainQueue())
    108  if (prev_size == 1) {    (in DrainQueue())
    115  if (prev_size == 2) {    (in DrainQueue())
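These WorkSerializer hits show one lock-free pattern: the value returned by an atomic fetch-and-add on the item count decides ownership, and the caller whose increment found the serializer idle keeps draining callbacks until its decrement empties it again (the real code biases the count, which is why the excerpt can assert prev_size > 0). Below is a minimal sketch of the idea, using std::atomic and a mutex-protected std::deque instead of gRPC's lock-free MPSC queue; the SimpleSerializer name and the zero-based count are assumptions made for the sketch, not the gRPC implementation.

    #include <atomic>
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <mutex>

    // Illustrative sketch, not the gRPC WorkSerializer: callbacks run one at a
    // time with no dedicated thread. The thread whose increment finds the
    // serializer idle (prev_size == 0) becomes the drainer; everyone else just
    // enqueues and returns.
    class SimpleSerializer {
     public:
      void Run(std::function<void()> callback) {
        {
          std::lock_guard<std::mutex> lock(mu_);  // gRPC uses a lock-free MPSC queue here
          queue_.push_back(std::move(callback));
        }
        const size_t prev_size = size_.fetch_add(1, std::memory_order_acq_rel);
        if (prev_size == 0) Drain();  // we own the serializer until the count hits zero
      }

     private:
      void Drain() {
        for (;;) {
          std::function<void()> callback;
          {
            std::lock_guard<std::mutex> lock(mu_);
            callback = std::move(queue_.front());  // never empty: items are pushed before the count is bumped
            queue_.pop_front();
          }
          callback();
          // If our decrement brought the count back to zero, nothing is left to run.
          if (size_.fetch_sub(1, std::memory_order_acq_rel) == 1) return;
        }
      }

      std::atomic<size_t> size_{0};
      std::mutex mu_;
      std::deque<std::function<void()>> queue_;
    };

    int main() {
      SimpleSerializer serializer;
      serializer.Run([] { std::puts("first callback"); });
      serializer.Run([] { std::puts("second callback"); });
      return 0;
    }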
call_combiner.cc
    124  size_t prev_size =    (in Start(), local)
    127  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,    (in Start())
    128  prev_size + 1);    (in Start())
    131  if (prev_size == 0) {    (in Start())
    156  size_t prev_size =    (in Stop(), local)
    159  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,    (in Stop())
    160  prev_size - 1);    (in Stop())
    162  GPR_ASSERT(prev_size >= 1);    (in Stop())
    163  if (prev_size > 1) {    (in Stop())
/external/grpc-grpc/src/core/lib/iomgr/
call_combiner.cc
     73  size_t prev_size = static_cast<size_t>(    (in grpc_call_combiner_start(), local)
     76  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,    (in grpc_call_combiner_start())
     77  prev_size + 1);    (in grpc_call_combiner_start())
     80  if (prev_size == 0) {    (in grpc_call_combiner_start())
    108  size_t prev_size = static_cast<size_t>(    (in grpc_call_combiner_stop(), local)
    111  gpr_log(GPR_INFO, " size: %" PRIdPTR " -> %" PRIdPTR, prev_size,    (in grpc_call_combiner_stop())
    112  prev_size - 1);    (in grpc_call_combiner_stop())
    114  GPR_ASSERT(prev_size >= 1);    (in grpc_call_combiner_stop())
    115  if (prev_size > 1) {    (in grpc_call_combiner_stop())
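Both call_combiner listings (this one and the grpcio-sys copy above) apply the same prev_size trick to the combiner's closure count: a start whose fetch-add sees 0 may run its closure immediately, and a stop whose fetch-sub still sees queued work hands the combiner to the next closure instead of draining everything itself. A rough sketch of that handoff follows, with std::atomic and a mutexed queue standing in for gpr_atm and gpr_mpscq; TinyCombiner, its inline execution of the next closure, and the %zu logging are simplifications for the sketch, not the gRPC API.

    #include <atomic>
    #include <cassert>
    #include <cstdio>
    #include <deque>
    #include <functional>
    #include <mutex>

    // Illustrative sketch of the call-combiner counting scheme, not the gRPC code.
    class TinyCombiner {
     public:
      // Schedules `closure` under the combiner. If the combiner was idle the
      // closure runs right away; the caller calls Stop() when that work is done.
      void Start(std::function<void()> closure) {
        const size_t prev_size = size_.fetch_add(1, std::memory_order_acq_rel);
        std::printf("  size: %zu -> %zu\n", prev_size, prev_size + 1);
        if (prev_size == 0) {
          closure();  // combiner was idle; gRPC schedules this on the exec_ctx instead
          return;
        }
        // The count was bumped before this push, so Stop() below tolerates a
        // queue that looks empty for an instant.
        std::lock_guard<std::mutex> lock(mu_);
        queue_.push_back(std::move(closure));
      }

      // Marks the currently running closure's work as finished.
      void Stop() {
        const size_t prev_size = size_.fetch_sub(1, std::memory_order_acq_rel);
        std::printf("  size: %zu -> %zu\n", prev_size, prev_size - 1);
        assert(prev_size >= 1);
        if (prev_size > 1) {
          // Closures queued up while we held the combiner: hand it to the next one.
          std::function<void()> next;
          for (;;) {  // retry: the enqueuer may not have pushed yet (see Start())
            std::lock_guard<std::mutex> lock(mu_);
            if (!queue_.empty()) {
              next = std::move(queue_.front());
              queue_.pop_front();
              break;
            }
          }
          next();
          Stop();  // complete it too; a real combiner reschedules the closure and
                   // lets its own completion path call Stop() later
        }
      }

     private:
      std::atomic<size_t> size_{0};
      std::mutex mu_;
      std::deque<std::function<void()>> queue_;
    };

    int main() {
      TinyCombiner combiner;
      combiner.Start([] { std::puts("first closure runs immediately"); });
      combiner.Start([] { std::puts("second closure waits in the queue"); });
      combiner.Stop();  // first closure's work is done; the queued one runs now
      return 0;
    }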
/external/pcre/src/sljit/
sljitExecAllocator.c
    223  sljit_uw prev_size;    (member)
    291  header->prev_size = chunk_size;    (in sljit_malloc_exec())
    292  AS_BLOCK_HEADER(header, size)->prev_size = size;    (in sljit_malloc_exec())
    317  header->prev_size = 0;    (in sljit_malloc_exec())
    325  free_block->header.prev_size = size;    (in sljit_malloc_exec())
    336  next_header->prev_size = chunk_size;    (in sljit_malloc_exec())
    355  free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);    (in sljit_free_exec())
    359  header->prev_size = free_block->size;    (in sljit_free_exec())
    371  header->prev_size = free_block->size;    (in sljit_free_exec())
    375  if (SLJIT_UNLIKELY(!free_block->header.prev_size && header->size == 1)) {    (in sljit_free_exec())
    [all …]
sljitProtExecAllocator.c
    268  sljit_uw prev_size;    (member)
    338  header->prev_size = chunk_size;    (in sljit_malloc_exec())
    340  AS_BLOCK_HEADER(header, size)->prev_size = size;    (in sljit_malloc_exec())
    371  header->prev_size = 0;    (in sljit_malloc_exec())
    380  free_block->header.prev_size = size;    (in sljit_malloc_exec())
    392  next_header->prev_size = chunk_size;    (in sljit_malloc_exec())
    412  free_block = AS_FREE_BLOCK(header, -(sljit_sw)header->prev_size);    (in sljit_free_exec())
    416  header->prev_size = free_block->size;    (in sljit_free_exec())
    428  header->prev_size = free_block->size;    (in sljit_free_exec())
    432  if (SLJIT_UNLIKELY(!free_block->header.prev_size && header->size == 1)) {    (in sljit_free_exec())
    [all …]
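Both sljit executable-memory allocators lay out blocks with a header that records the block's own size and the size of the block physically in front of it, so sljit_free_exec() can step left with AS_FREE_BLOCK(header, -prev_size) and merge adjacent free blocks in place. Here is a stripped-down boundary-tag sketch of that layout, assuming made-up BlockHeader/next_block/prev_block/free_and_coalesce names and none of sljit's chunk bookkeeping or free lists.

    #include <cassert>
    #include <cstddef>
    #include <new>

    // Boundary-tag sketch of the layout both sljit allocators use (not the sljit
    // code itself): every block header stores the block's own size and the size
    // of the block physically before it, so freeing can step left as well as
    // right and merge adjacent free blocks without scanning a global list.
    struct BlockHeader {
      size_t size;       // size of this block, header included
      size_t prev_size;  // size of the physically preceding block, 0 if this is the first
      bool free;
    };

    static BlockHeader* next_block(BlockHeader* h) {
      return reinterpret_cast<BlockHeader*>(reinterpret_cast<char*>(h) + h->size);
    }

    static BlockHeader* prev_block(BlockHeader* h) {
      return reinterpret_cast<BlockHeader*>(reinterpret_cast<char*>(h) - h->prev_size);
    }

    // Frees `h` and merges it with free neighbours; `chunk_end` marks the end of
    // the arena so the walk never runs past the last block.
    static void free_and_coalesce(BlockHeader* h, char* chunk_end) {
      h->free = true;

      if (h->prev_size != 0 && prev_block(h)->free) {  // merge into the free block on the left
        BlockHeader* prev = prev_block(h);
        prev->size += h->size;
        h = prev;
      }
      if (reinterpret_cast<char*>(next_block(h)) < chunk_end && next_block(h)->free) {
        h->size += next_block(h)->size;                // absorb the free block on the right
      }
      if (reinterpret_cast<char*>(next_block(h)) < chunk_end) {
        next_block(h)->prev_size = h->size;            // the new right neighbour must see the merged size
      }
    }

    int main() {
      alignas(BlockHeader) char chunk[3 * 64];
      BlockHeader* a = new (chunk) BlockHeader{64, 0, false};
      BlockHeader* b = new (chunk + 64) BlockHeader{64, 64, false};
      BlockHeader* c = new (chunk + 128) BlockHeader{64, 64, false};
      (void)b;

      free_and_coalesce(a, chunk + sizeof(chunk));
      free_and_coalesce(next_block(a), chunk + sizeof(chunk));  // the middle block merges into the free one on its left
      assert(a->size == 128 && c->prev_size == 128);
      return 0;
    }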
/external/grpc-grpc/third_party/nanopb/
pb_common.c
     42  size_t prev_size = prev_field->data_size;    (in pb_field_iter_next(), local)
     48  prev_size = 0;    (in pb_field_iter_next())
     56  prev_size *= prev_field->array_size;    (in pb_field_iter_next())
     62  prev_size = sizeof(void*);    (in pb_field_iter_next())
     72  iter->pData = (char*)iter->pData + prev_size + iter->pos->data_offset;    (in pb_field_iter_next())
/external/nanopb-c/
pb_common.c
     42  size_t prev_size = prev_field->data_size;    (in pb_field_iter_next(), local)
     56  prev_size *= prev_field->array_size;    (in pb_field_iter_next())
     62  prev_size = sizeof(void*);    (in pb_field_iter_next())
     72  iter->pData = (char*)iter->pData + prev_size + iter->pos->data_offset;    (in pb_field_iter_next())
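Both nanopb copies of pb_field_iter_next() advance the pointer into the C struct by the size of the field just passed: data_size scaled by array_size for repeated fields, collapsed to sizeof(void*) for pointer fields, plus the next field's data_offset. A small sketch of the same arithmetic over a hypothetical Sensor struct and FieldDesc table (both invented here; nanopb generates the real tables from the .proto file):

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Hypothetical message struct plus a nanopb-style field table for it. Each
    // entry stores the field's element size, array size and the padding between
    // it and the previous field, so an iterator can walk the struct by adding
    // prev_size + data_offset at every step.
    struct Sensor {
      int32_t id;
      double samples[4];  // fixed-size repeated field
      char* name;         // pointer field: the struct only stores sizeof(void*)
    };

    struct FieldDesc {
      size_t data_size;    // size of one element
      size_t array_size;   // 1 for singular fields
      size_t data_offset;  // gap between the end of the previous field and this one
      bool is_pointer;
    };

    static const FieldDesc kFields[] = {
        {sizeof(int32_t), 1, offsetof(Sensor, id), false},
        {sizeof(double), 4,
         offsetof(Sensor, samples) - (offsetof(Sensor, id) + sizeof(int32_t)), false},
        {sizeof(char*), 1,
         offsetof(Sensor, name) - (offsetof(Sensor, samples) + 4 * sizeof(double)), true},
    };

    int main() {
      Sensor msg{};
      char* pData = reinterpret_cast<char*>(&msg) + kFields[0].data_offset;

      const size_t field_count = sizeof(kFields) / sizeof(kFields[0]);
      for (size_t i = 1; i < field_count; ++i) {
        const FieldDesc& prev_field = kFields[i - 1];
        size_t prev_size = prev_field.data_size;
        prev_size *= prev_field.array_size;                    // repeated fields hold array_size elements
        if (prev_field.is_pointer) prev_size = sizeof(void*);  // pointer fields collapse to one pointer
        pData = pData + prev_size + kFields[i].data_offset;    // same step as the pb_common.c excerpt
      }
      assert(pData == reinterpret_cast<char*>(&msg.name));  // the iterator landed on the last field
      return 0;
    }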
/external/jemalloc_new/test/unit/
bitmap.c
    141  size_t prev_size) {    (in test_bitmap_size_body(), argument)
    145  assert_zu_ge(size, prev_size, "Bitmap size is smaller than expected");    (in test_bitmap_size_body())
    150  size_t nbits, prev_size;    (in TEST_BEGIN(), local)
    152  prev_size = 0;    (in TEST_BEGIN())
    156  prev_size = test_bitmap_size_body(&binfo, nbits, prev_size);    (in TEST_BEGIN())
    160  prev_size = test_bitmap_size_body(&binfo, nbits, \    (in TEST_BEGIN())
    161  prev_size); \    (in TEST_BEGIN())
    163  prev_size = 0;    (in TEST_BEGIN())
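The jemalloc test threads prev_size through successive calls so it can assert that the computed bitmap size never shrinks as nbits grows. The same shape of property check, written against a stand-in bytes_needed() helper rather than jemalloc's bitmap_info machinery:

    #include <cassert>
    #include <cstddef>

    // Hypothetical size function standing in for jemalloc's bitmap size
    // computation: bytes needed for a flat bitmap of `nbits` bits, rounded up
    // to whole 64-bit groups.
    static size_t bytes_needed(size_t nbits) {
      const size_t bits_per_group = 64;
      return ((nbits + bits_per_group - 1) / bits_per_group) * sizeof(unsigned long long);
    }

    // Mirrors the structure of test_bitmap_size_body(): check one size against
    // the previous one and hand the new value back to the caller.
    static size_t check_size_body(size_t nbits, size_t prev_size) {
      const size_t size = bytes_needed(nbits);
      assert(size >= prev_size && "size function must be non-decreasing in nbits");
      return size;
    }

    int main() {
      size_t prev_size = 0;
      for (size_t nbits = 1; nbits <= 4096; ++nbits) {
        prev_size = check_size_body(nbits, prev_size);
      }
      return 0;
    }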
/external/mesa3d/src/amd/compiler/
aco_print_asm.cpp
    190  unsigned prev_size = 0;    (in print_asm(), local)
    195  if (pos + prev_size <= exec_size && prev_pos != pos && !new_block &&    (in print_asm())
    196  memcmp(&binary[prev_pos], &binary[pos], prev_size * 4) == 0) {    (in print_asm())
    198  pos += prev_size;    (in print_asm())
    223  prev_size = res.second;    (in print_asm())
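print_asm() remembers where the previously printed instruction started and how many dwords it occupied; when the next prev_size dwords compare equal under memcmp, it skips them and counts a repeat instead of printing the same disassembly again. A simplified sketch of that collapsing loop, with a dummy one-dword decode() in place of the real LLVM disassembler and without the prev_pos/new_block guards from the original:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>
    #include <vector>

    // Stand-in "decoder": every instruction is one dword here, just so the
    // repeat-collapsing logic has something to work on.
    static unsigned decode(const std::vector<uint32_t>& binary, unsigned pos) {
      (void)binary;
      (void)pos;
      return 1;  // instruction size in dwords
    }

    // Print instruction words, collapsing runs of identical encodings into a
    // repeat count, the way the print_asm() excerpt does with memcmp over
    // prev_size * 4 bytes.
    static void print_words(const std::vector<uint32_t>& binary) {
      unsigned pos = 0, prev_pos = 0, prev_size = 0, repeats = 0;
      while (pos < binary.size()) {
        if (prev_size && pos + prev_size <= binary.size() &&
            std::memcmp(&binary[prev_pos], &binary[pos], prev_size * 4) == 0) {
          ++repeats;           // identical to the previous instruction: don't reprint
          pos += prev_size;
          continue;
        }
        if (repeats) {
          std::printf("  (last instruction repeated %u times)\n", repeats);
          repeats = 0;
        }
        const unsigned size = decode(binary, pos);
        std::printf("  %08x\n", static_cast<unsigned>(binary[pos]));
        prev_pos = pos;
        prev_size = size;
        pos += size;
      }
      if (repeats) std::printf("  (last instruction repeated %u times)\n", repeats);
    }

    int main() {
      print_words({0xBF810000u, 0xBF810000u, 0xBF810000u, 0x7E000280u});
      return 0;
    }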
/external/tensorflow/tensorflow/core/kernels/boosted_trees/quantiles/
weighted_quantiles_summary_test.cc
    132  int prev_size = 1;    (in TEST_F(), local)
    157  size += prev_size;    (in TEST_F())
    158  prev_size = last_size;    (in TEST_F())
/external/perfetto/src/kallsyms/
kernel_symbol_map.cc
    198  const size_t prev_size = buf_.size();    (in Add(), local)
    199  buf_.resize(prev_size + token_size);    (in Add())
    200  char* tok_wptr = &buf_[prev_size];    (in Add())
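Add() appends a token to a flat byte buffer by remembering the old size, resizing, and only then taking a write pointer at &buf_[prev_size], since the resize may reallocate and invalidate earlier pointers. A minimal sketch of that append pattern; the TokenBuf class and its offsets_ index are inventions for the example, not Perfetto's actual token storage:

    #include <cassert>
    #include <cstring>
    #include <string>
    #include <vector>

    // Sketch of the append pattern in the Add() excerpt: remember the old size,
    // grow the flat buffer, then write the new token through a pointer taken
    // *after* the resize.
    class TokenBuf {
     public:
      void Add(const std::string& token) {
        if (token.empty()) return;
        const size_t prev_size = buf_.size();
        buf_.resize(prev_size + token.size());
        char* tok_wptr = &buf_[prev_size];  // valid only because it is taken after the resize
        std::memcpy(tok_wptr, token.data(), token.size());
        offsets_.push_back(prev_size);      // remember where this token starts
      }

      std::string Get(size_t i) const {
        const size_t begin = offsets_[i];
        const size_t end = (i + 1 < offsets_.size()) ? offsets_[i + 1] : buf_.size();
        return std::string(&buf_[begin], end - begin);
      }

     private:
      std::vector<char> buf_;
      std::vector<size_t> offsets_;
    };

    int main() {
      TokenBuf buf;
      buf.Add("start_kernel");
      buf.Add("do_fork");
      assert(buf.Get(1) == "do_fork");
      return 0;
    }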
/external/libxml2/
error.c
     24  int size, prev_size = -1; \
     39  if (prev_size == chars) { \
     42  prev_size = chars; \
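These error.c lines sit inside libxml2's XML_GET_VAR_STR macro, which formats a message into a heap buffer by re-running vsnprintf with a bigger buffer until the produced length stops changing (prev_size == chars), a dance that also copes with pre-C99 vsnprintf implementations that return the truncated length or -1. A sketch of the same grow-and-retry loop as a standalone helper; format_alloc, the 150-byte start size, and the 64000-byte cap are illustrative, not libxml2's API:

    #include <cstdarg>
    #include <cstdio>
    #include <cstdlib>

    // Format into a freshly allocated buffer, growing and retrying until the
    // produced length repeats, in the spirit of the XML_GET_VAR_STR excerpt.
    static char* format_alloc(const char* fmt, ...) {
      int size = 150;
      int prev_size = -1;
      char* str = static_cast<char*>(std::malloc(size));
      if (str == nullptr) return nullptr;

      while (size < 64000) {
        va_list ap;
        va_start(ap, fmt);
        int chars = std::vsnprintf(str, size, fmt, ap);
        va_end(ap);

        if (chars > -1 && chars < size) {
          if (prev_size == chars) break;  // same length twice in a row: it really fits
          prev_size = chars;              // might still be a truncated length on old libcs
        }
        size += (chars > -1) ? chars + 1 : 100;  // grow and retry
        char* larger = static_cast<char*>(std::realloc(str, size));
        if (larger == nullptr) break;
        str = larger;
      }
      return str;  // caller owns the result and must free() it
    }

    int main() {
      char* msg = format_alloc("error at line %d: %s", 42, "unexpected '<'");
      if (msg != nullptr) std::puts(msg);
      std::free(msg);
      return 0;
    }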
/external/swiftshader/third_party/llvm-10.0/llvm/include/llvm/CodeGen/GlobalISel/
LegalizerInfo.h
   1247  int prev_size = -1;    (in checkPartialSizeAndActionsVector(), local)
   1249  assert(SizeAndAction.first > prev_size);    (in checkPartialSizeAndActionsVector())
   1250  prev_size = SizeAndAction.first;    (in checkPartialSizeAndActionsVector())
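checkPartialSizeAndActionsVector() walks a (size, action) table and asserts the sizes are strictly increasing, seeding prev_size with -1 so the first entry always passes. The same check in miniature, with a stand-in Action enum instead of LLVM's LegalizeAction:

    #include <cassert>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // The table maps a type size (in bits) to some action; the checker only
    // verifies that the sizes are strictly increasing, so later lookups by
    // lower bound stay well defined.
    enum class Action { Legal, WidenScalar, NarrowScalar };
    using SizeAndActionsVec = std::vector<std::pair<uint32_t, Action>>;

    static void checkSizeAndActionsVector(const SizeAndActionsVec& v) {
      int64_t prev_size = -1;  // smaller than any valid size, so element 0 always passes
      for (const auto& SizeAndAction : v) {
        assert(static_cast<int64_t>(SizeAndAction.first) > prev_size &&
               "sizes must be strictly increasing");
        prev_size = SizeAndAction.first;
      }
    }

    int main() {
      checkSizeAndActionsVector({{8, Action::WidenScalar},
                                 {32, Action::Legal},
                                 {64, Action::NarrowScalar}});
      return 0;
    }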
/external/armnn/docs/
FAQ.md
     33  UnitTests executable exits with "corrupted size vs. prev_size" in armv7 environments.
/external/jemalloc_new/src/
extent.c
   1640  size_t prev_size;    (in extent_record(), local)
   1642  prev_size = extent_size_get(extent);    (in extent_record())
   1648  extent_size_get(extent) >= prev_size + LARGE_MINCLASS);    (in extent_record())
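In extent_record(), prev_size is a debug-only snapshot taken before the extent is coalesced with its neighbours, so an assert can verify afterwards that a successful merge grew the extent by at least LARGE_MINCLASS. A generic before/after sketch of that kind of invariant check; Extent, try_coalesce() and MIN_GROWTH are placeholders, not jemalloc's types or functions:

    #include <cassert>
    #include <cstddef>

    constexpr size_t MIN_GROWTH = 4096;  // stands in for jemalloc's LARGE_MINCLASS

    struct Extent { size_t size; };

    // Hypothetical merge: absorbs a neighbouring region into the extent and
    // reports whether anything was absorbed.
    static bool try_coalesce(Extent* extent, size_t neighbour_size) {
      if (neighbour_size == 0) return false;
      extent->size += neighbour_size;
      return true;
    }

    // Snapshot the size, attempt the merge, then assert that a successful merge
    // grew the extent by at least the minimum amount, as in the excerpt above.
    static void record(Extent* extent, size_t neighbour_size) {
      const size_t prev_size = extent->size;
      const bool coalesced = try_coalesce(extent, neighbour_size);
      assert(!coalesced || extent->size >= prev_size + MIN_GROWTH);
      (void)coalesced;
      (void)prev_size;  // keep NDEBUG builds warning-free
    }

    int main() {
      Extent e{8192};
      record(&e, 4096);  // grows by exactly MIN_GROWTH, so the assertion holds
      return 0;
    }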
/external/python/cffi/c/
_cffi_backend.c
   5011  Py_ssize_t prev_size;    (in _add_field(), local)
   5025  prev_size = PyDict_Size(interned_fields);    (in _add_field())
   5032  if (PyDict_Size(interned_fields) != prev_size + 1) {    (in _add_field())
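_add_field() detects a duplicate struct field name by comparing PyDict_Size(interned_fields) before and after the insert: if the dict did not grow by exactly one, the name was already interned. The same size-delta trick, with a std::unordered_map standing in for the CPython dict and an invented add_field() helper:

    #include <cassert>
    #include <iostream>
    #include <string>
    #include <unordered_map>

    // Insert the field name, then compare the container size with the size
    // recorded just before: if it did not grow by exactly one, the name was
    // already present (the original uses PyDict_Size() around PyDict_SetItem()).
    static bool add_field(std::unordered_map<std::string, int>* interned_fields,
                          const std::string& name, int offset) {
      const size_t prev_size = interned_fields->size();
      (*interned_fields)[name] = offset;  // overwrites silently, like PyDict_SetItem
      if (interned_fields->size() != prev_size + 1) {
        std::cerr << "duplicate field name '" << name << "'\n";
        return false;
      }
      return true;
    }

    int main() {
      std::unordered_map<std::string, int> fields;
      assert(add_field(&fields, "x", 0));
      assert(add_field(&fields, "y", 8));
      assert(!add_field(&fields, "x", 16));  // duplicate detected via the size check
      return 0;
    }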