/external/eigen/Eigen/src/StlSupport/ |
D | StdVector.h |
    80   void resize(size_type new_size)    in resize() argument
    81   { resize(new_size, T()); }    in resize()
    85   void resize(size_type new_size, const value_type& x)    in resize() argument
    87   if (vector_base::size() < new_size)    in resize()
    88   vector_base::_Insert_n(vector_base::end(), new_size - vector_base::size(), x);    in resize()
    89   else if (new_size < vector_base::size())    in resize()
    90   vector_base::erase(vector_base::begin() + new_size, vector_base::end());    in resize()
    97   void insert(const_iterator position, size_type new_size, const value_type& x)    in insert() argument
    98   { vector_base::insert(position, new_size, x); }    in insert()
    102  void resize(size_type new_size, const value_type& x)    in resize() argument
    [all …]
|
D | StdDeque.h |
    80   void resize(size_type new_size)    in resize() argument
    81   { resize(new_size, T()); }    in resize()
    85   void resize(size_type new_size, const value_type& x)    in resize() argument
    87   if (deque_base::size() < new_size)    in resize()
    88   deque_base::_Insert_n(deque_base::end(), new_size - deque_base::size(), x);    in resize()
    89   else if (new_size < deque_base::size())    in resize()
    90   deque_base::erase(deque_base::begin() + new_size, deque_base::end());    in resize()
    99   void insert(const_iterator position, size_type new_size, const value_type& x)    in insert() argument
    100  { deque_base::insert(position, new_size, x); }    in insert()
    103  void resize(size_type new_size, const value_type& x)    in resize() argument
    [all …]
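The two Eigen headers above patch std::vector and std::deque so that fixed-size vectorizable types can be stored in them; resize(new_size) forwards to resize(new_size, T()), and the new_size/size() comparison decides between inserting and erasing elements. A minimal caller-side sketch, assuming only Eigen's documented <Eigen/StdVector> header and aligned_allocator (nothing below is taken from the index itself):

    // Illustrative only: storing a fixed-size vectorizable Eigen type in a
    // std::vector with the aligned allocator these headers support.
    #include <Eigen/StdVector>
    #include <vector>

    int main() {
      std::vector<Eigen::Vector4f, Eigen::aligned_allocator<Eigen::Vector4f>> v;
      v.resize(8, Eigen::Vector4f::Zero());  // resize(new_size, x): grows, inserting copies of x
      v.resize(3);                           // resize(new_size): shrinks, erasing from begin() + new_size
      return static_cast<int>(v.size());
    }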
|
/external/tensorflow/tensorflow/core/kernels/boosted_trees/quantiles/ |
D | weighted_quantiles_summary_test.cc |
    94   for (int new_size = 9; new_size >= 2; --new_size) {    in TEST_F() local
    97   summary.Compress(new_size);    in TEST_F()
    101  EXPECT_TRUE(summary.Size() >= new_size && summary.Size() <= new_size + 2);    in TEST_F()
    102  EXPECT_LE(summary.ApproximationError(), 1.0 / new_size);    in TEST_F()
    114  for (int new_size = 9; new_size >= 2; new_size -= 2) {    in TEST_F() local
    116  summary.Compress(new_size);    in TEST_F()
    119  EXPECT_TRUE(summary.Size() >= new_size && summary.Size() <= new_size + 2);    in TEST_F()
    120  EXPECT_LE(summary.ApproximationError(), prev_eps + 1.0 / new_size);    in TEST_F()
    148  int new_size = std::max(rand.Uniform(size), 2u);    in TEST_F() local
    149  summary.Compress(new_size);    in TEST_F()
    [all …]
|
/external/ltp/testcases/kernel/tracing/ftrace_test/ftrace_stress/ |
D | ftrace_buffer_size_kb.sh |
    30   new_size=1
    33   echo $new_size > "$TRACING_PATH"/buffer_size_kb
    34   new_size=$(( $new_size + $step ))
    40   new_size=$(( $new_size - $step ))
    41   echo $new_size > "$TRACING_PATH"/buffer_size_kb
|
/external/python/cpython3/Lib/ctypes/test/ |
D | test_varsize_struct.py |
    17   new_size = sizeof(X) + sizeof(c_int) * 1
    18   resize(x, new_size)
    19   self.assertEqual(sizeof(x), new_size)
    23   new_size = sizeof(X) + sizeof(c_int) * 9
    24   resize(x, new_size)
    25   self.assertEqual(sizeof(x), new_size)
    29   new_size = sizeof(X) + sizeof(c_int) * 1
    30   resize(x, new_size)
    31   self.assertEqual(sizeof(x), new_size)
|
/external/python/cpython2/Lib/ctypes/test/ |
D | test_varsize_struct.py |
    17   new_size = sizeof(X) + sizeof(c_int) * 1
    18   resize(x, new_size)
    19   self.assertEqual(sizeof(x), new_size)
    23   new_size = sizeof(X) + sizeof(c_int) * 9
    24   resize(x, new_size)
    25   self.assertEqual(sizeof(x), new_size)
    29   new_size = sizeof(X) + sizeof(c_int) * 1
    30   resize(x, new_size)
    31   self.assertEqual(sizeof(x), new_size)
|
/external/ltp/testcases/kernel/syscalls/mremap/ |
D | mremap05.c |
    54   size_t new_size; /* in pages */    member
    77   .new_size = 1,
    86   .new_size = 1,
    95   .new_size = 1,
    104  .new_size = 1,
    111  .new_size = 1,
    140  t->ret = mremap(t->old_address, t->old_size, t->new_size, t->flags,    in test_mremap()
    165  t->new_address = get_test_area(t->new_size * pagesize, 1);    in setup0()
    171  t->new_address = get_test_area((t->new_size + 1) * pagesize, 1) + 1;    in setup1()
    183  t->new_address = get_test_area(t->new_size * pagesize, 1);    in setup3()
    [all …]
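The mremap05 cases drive mremap() with various new_size, new_address and flags combinations. For orientation, a minimal Linux-only sketch of growing an anonymous mapping is below; it is not taken from the test, and it assumes glibc's mremap prototype behind _GNU_SOURCE:

    // Illustrative sketch: double an anonymous mapping, letting the kernel
    // move it if it cannot be extended in place.
    #define _GNU_SOURCE 1
    #include <sys/mman.h>
    #include <unistd.h>
    #include <cstdio>

    int main() {
      const size_t page = static_cast<size_t>(sysconf(_SC_PAGESIZE));
      const size_t old_size = 2 * page;
      const size_t new_size = 4 * page;

      void* p = mmap(nullptr, old_size, PROT_READ | PROT_WRITE,
                     MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
      if (p == MAP_FAILED) return 1;

      // MREMAP_MAYMOVE allows relocation when the region cannot grow at its
      // current address; the LTP cases above pass their own flags and new_address.
      void* q = mremap(p, old_size, new_size, MREMAP_MAYMOVE);
      if (q == MAP_FAILED) { munmap(p, old_size); return 1; }

      std::printf("remapped %zu -> %zu bytes at %p\n", old_size, new_size, q);
      munmap(q, new_size);
      return 0;
    }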
|
/external/protobuf/src/google/protobuf/ |
D | repeated_field.cc |
    49   int new_size = current_size_ + extend_amount;    in InternalExtend() local
    50   if (total_size_ >= new_size) {    in InternalExtend()
    57   new_size = std::max(kMinRepeatedFieldAllocationSize,    in InternalExtend()
    58   std::max(total_size_ * 2, new_size));    in InternalExtend()
    59   GOOGLE_CHECK_LE(new_size, (std::numeric_limits<size_t>::max() - kRepHeaderSize) /    in InternalExtend()
    62   size_t bytes = kRepHeaderSize + sizeof(old_rep->elements[0]) * new_size;    in InternalExtend()
    71   total_size_ = new_size;    in InternalExtend()
    91   void RepeatedPtrFieldBase::Reserve(int new_size) {    in Reserve() argument
    92   if (new_size > current_size_) {    in Reserve()
    93   InternalExtend(new_size - current_size_);    in Reserve()
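The InternalExtend() matches show protobuf's growth rule for repeated fields: grow to at least the requested size, at least double the current capacity, never drop below a minimum allocation, and check that the resulting byte count cannot overflow. A schematic restatement, with placeholder constants rather than protobuf's:

    // Schematic only; kMinAllocation and kHeaderBytes stand in for protobuf's
    // kMinRepeatedFieldAllocationSize and kRepHeaderSize.
    #include <algorithm>
    #include <cstddef>
    #include <limits>
    #include <stdexcept>

    constexpr int kMinAllocation = 4;
    constexpr std::size_t kHeaderBytes = 8;

    // element_bytes must be non-zero.
    int NextCapacity(int current_capacity, int required, std::size_t element_bytes) {
      int new_size = std::max(kMinAllocation, std::max(current_capacity * 2, required));
      // Mirrors the GOOGLE_CHECK_LE guard: header plus elements must fit in size_t.
      if (static_cast<std::size_t>(new_size) >
          (std::numeric_limits<std::size_t>::max() - kHeaderBytes) / element_bytes) {
        throw std::length_error("repeated field too large");
      }
      return new_size;
    }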
|
/external/e2fsprogs/resize/ |
D | main.c |
    259  blk64_t new_size = 0;    in main() local
    501  new_size = min_size;    in main()
    503  new_size = parse_num_blocks2(new_size_str,    in main()
    505  if (new_size == 0) {    in main()
    511  new_size = max_size;    in main()
    514  new_size &= ~((blk64_t)((sys_page_size / blocksize)-1));    in main()
    518  new_size = ext2fs_blocks_count(fs->super);    in main()
    522  if (new_size == (1ULL << 32))    in main()
    523  new_size--;    in main()
    524  else if (new_size > (1ULL << 32)) {    in main()
    [all …]
|
/external/freetype/src/lzw/ |
D | ftzopen.c |
    126  FT_Offset new_size = old_size;    in ft_lzwstate_stack_grow() local
    128  new_size = new_size + ( new_size >> 1 ) + 4;    in ft_lzwstate_stack_grow()
    138  if ( new_size > ( 1 << LZW_MAX_BITS ) )    in ft_lzwstate_stack_grow()
    140  new_size = 1 << LZW_MAX_BITS;    in ft_lzwstate_stack_grow()
    141  if ( new_size == old_size )    in ft_lzwstate_stack_grow()
    145  if ( FT_RENEW_ARRAY( state->stack, old_size, new_size ) )    in ft_lzwstate_stack_grow()
    148  state->stack_size = new_size;    in ft_lzwstate_stack_grow()
    159  FT_UInt new_size = old_size;    in ft_lzwstate_prefix_grow() local
    164  if ( new_size == 0 ) /* first allocation -> 9 bits */    in ft_lzwstate_prefix_grow()
    165  new_size = 512;    in ft_lzwstate_prefix_grow()
    [all …]
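ft_lzwstate_stack_grow() enlarges the stack by roughly 50% plus a small constant and clamps the result at 1 << LZW_MAX_BITS, failing only when it is already at that cap. A standalone sketch of the rule (the bit width is a parameter here, since the macro's value is not shown in the excerpt):

    // Illustrative restatement of the growth rule above.
    #include <cstddef>

    bool GrowLzwStack(std::size_t old_size, unsigned max_bits, std::size_t* new_size_out) {
      const std::size_t cap = std::size_t(1) << max_bits;     // 1 << LZW_MAX_BITS in the original
      std::size_t new_size = old_size + (old_size >> 1) + 4;  // ~1.5x plus a constant
      if (new_size > cap) {
        new_size = cap;
        if (new_size == old_size) return false;               // already at the cap; cannot grow
      }
      *new_size_out = new_size;
      return true;
    }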
|
/external/tensorflow/tensorflow/lite/delegates/flex/ |
D | util_test.cc |
    35   std::vector<int> new_size;    member
    52   TfLiteIntArray* new_size) {    in ResizeTensor() argument
    54   c->new_size.clear();    in ResizeTensor()
    55   for (int i = 0; i < new_size->size; ++i) {    in ResizeTensor()
    56   c->new_size.push_back(new_size->data[i]);    in ResizeTensor()
    58   TfLiteIntArrayFree(new_size);    in ResizeTensor()
    83   EXPECT_THAT(context.new_size, ElementsAre(0));    in TEST()
    88   EXPECT_THAT(context.new_size, ElementsAre(1, 2));    in TEST()
    93   EXPECT_THAT(context.new_size, ElementsAre(1, 2));    in TEST()
    105  EXPECT_THAT(context.new_size, ElementsAre(1, 2));    in TEST()
|
/external/v8/src/zone/ |
D | zone.cc |
    124  size_t new_size = kSegmentOverhead + new_size_no_overhead;    in NewExpand() local
    127  if (new_size_no_overhead < size || new_size < kSegmentOverhead) {    in NewExpand()
    131  if (new_size < kMinimumSegmentSize) {    in NewExpand()
    132  new_size = kMinimumSegmentSize;    in NewExpand()
    133  } else if (new_size >= kMaximumSegmentSize) {    in NewExpand()
    138  new_size = Max(min_new_size, kMaximumSegmentSize);    in NewExpand()
    140  if (new_size > INT_MAX) {    in NewExpand()
    145  allocator_->AllocateSegment(new_size, supports_compression());    in NewExpand()
    151  DCHECK_GE(segment->total_size(), new_size);    in NewExpand()
|
/external/perfetto/include/perfetto/ext/base/ |
D | container_annotations.h |
    26   #define ANNOTATE_NEW_BUFFER(buffer, capacity, new_size) \    argument
    30   (buffer) + (new_size)); \
    38   #define ANNOTATE_CHANGE_SIZE(buffer, capacity, old_size, new_size) \    argument
    42   (buffer) + (new_size)); \
    50   #define ANNOTATE_NEW_BUFFER(buffer, capacity, new_size)    argument
    52   #define ANNOTATE_CHANGE_SIZE(buffer, capacity, old_size, new_size)    argument
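These are the usual AddressSanitizer container annotations: they tell ASan how much of a buffer of size capacity is currently in use, so reads between size and capacity are flagged. A hedged sketch of a caller-side wrapper over the public sanitizer entry point (the guard and the AnnotateResize helper are assumptions; only __sanitizer_annotate_contiguous_container itself is the documented interface):

    #include <cstddef>
    #if defined(__has_feature)
    #if __has_feature(address_sanitizer)
    #include <sanitizer/common_interface_defs.h>
    #define HAVE_ASAN_CONTAINER_ANNOTATIONS 1
    #endif
    #endif

    // Marks [buffer, buffer + new_size) as live and the rest of the capacity as
    // poisoned, roughly what ANNOTATE_CHANGE_SIZE expands to under ASan.
    void AnnotateResize(const char* buffer, std::size_t capacity,
                        std::size_t old_size, std::size_t new_size) {
    #ifdef HAVE_ASAN_CONTAINER_ANNOTATIONS
      __sanitizer_annotate_contiguous_container(buffer, buffer + capacity,
                                                buffer + old_size,
                                                buffer + new_size);
    #else
      (void)buffer; (void)capacity; (void)old_size; (void)new_size;
    #endif
    }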
|
/external/pdfium/core/fxcrt/ |
D | cfx_binarybuf.cpp |
    55   FX_SAFE_SIZE_T new_size = m_DataSize;    in ExpandBuf() local
    56   new_size += add_size;    in ExpandBuf()
    57   if (m_AllocSize >= new_size.ValueOrDie())    in ExpandBuf()
    62   new_size += alloc_step - 1;  // Quantize, don't combine these lines.    in ExpandBuf()
    63   new_size /= alloc_step;    in ExpandBuf()
    64   new_size *= alloc_step;    in ExpandBuf()
    65   m_AllocSize = new_size.ValueOrDie();    in ExpandBuf()
|
D | cfx_memorystream.cpp |
    82   FX_SAFE_SIZE_T new_size = new_pos;    in WriteBlockAtOffset() local
    83   new_size *= 2;    in WriteBlockAtOffset()
    84   new_size += (kBlockSize - 1);    in WriteBlockAtOffset()
    85   new_size /= kBlockSize;    in WriteBlockAtOffset()
    86   new_size *= kBlockSize;    in WriteBlockAtOffset()
    87   if (!new_size.IsValid())    in WriteBlockAtOffset()
    90   m_nTotalSize = new_size.ValueOrDie();    in WriteBlockAtOffset()
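Both fxcrt buffers above run the size computation through pdfium's checked FX_SAFE_SIZE_T and then quantize it to a fixed step (alloc_step or kBlockSize). The same round-up-to-a-multiple with explicit overflow checks, as a sketch:

    // Illustrative only; step must be non-zero.
    #include <cstddef>
    #include <limits>

    bool QuantizedSize(std::size_t current, std::size_t add, std::size_t step,
                       std::size_t* out) {
      constexpr std::size_t kMax = std::numeric_limits<std::size_t>::max();
      if (add > kMax - current) return false;          // current + add would overflow
      const std::size_t needed = current + add;
      if (step - 1 > kMax - needed) return false;      // rounding up would overflow
      *out = (needed + step - 1) / step * step;        // round up to a multiple of step
      return true;
    }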
|
/external/v8/src/base/ |
D | region-allocator.cc |
    85   size_t new_size) {    in Split() argument
    86   DCHECK(IsAligned(new_size, page_size_));    in Split()
    87   DCHECK_NE(new_size, 0);    in Split()
    88   DCHECK_GT(region->size(), new_size);    in Split()
    94   new Region(region->begin() + new_size, region->size() - new_size, state);    in Split()
    99   region->set_size(new_size);    in Split()
    186  size_t new_size = requested_address - region->begin();    in AllocateRegionAt() local
    187  DCHECK(IsAligned(new_size, page_size_));    in AllocateRegionAt()
    188  region = Split(region, new_size);    in AllocateRegionAt()
    203  size_t RegionAllocator::TrimRegion(Address address, size_t new_size) {    in TrimRegion() argument
    [all …]
|
/external/tensorflow/tensorflow/core/platform/ |
D | ctstring_internal.h |
    229  size_t new_size) {    in TF_TString_ResizeUninitialized() argument
    231  size_t copy_size = TF_min(new_size, curr_size);    in TF_TString_ResizeUninitialized()
    237  if (new_size <= TF_TString_SmallCapacity) {    in TF_TString_ResizeUninitialized()
    238  str->u.smll.size = (uint8_t)((new_size << 2) | TF_TSTR_SMALL);  // NOLINT    in TF_TString_ResizeUninitialized()
    239  str->u.smll.str[new_size] = '\0';    in TF_TString_ResizeUninitialized()
    260  if (new_size < curr_size && new_size < curr_cap / 2) {    in TF_TString_ResizeUninitialized()
    263  } else if (new_size > curr_cap_x2) {    in TF_TString_ResizeUninitialized()
    264  new_cap = TF_align16(new_size + 1) - 1;    in TF_TString_ResizeUninitialized()
    265  } else if (new_size > curr_cap) {    in TF_TString_ResizeUninitialized()
    283  str->u.large.size = TF_TString_ToInternalSizeT(new_size, TF_TSTR_LARGE);    in TF_TString_ResizeUninitialized()
    [all …]
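TF_TString_ResizeUninitialized() keeps strings at or below TF_TString_SmallCapacity in the inline small representation and otherwise picks a new heap capacity: shrink when the new size falls well below the current capacity, jump straight to a 16-byte-aligned capacity for large requests, and double for ordinary growth. A simplified, illustrative policy in that spirit (the small capacity value and the exact shrink target are assumptions, since those lines are elided above):

    #include <cstddef>

    constexpr std::size_t kSmallCapacity = 22;  // assumption; stands in for TF_TString_SmallCapacity

    std::size_t AlignUp16(std::size_t n) { return (n + 15) & ~std::size_t{15}; }

    std::size_t NextCapacity(std::size_t cur_size, std::size_t cur_cap, std::size_t new_size) {
      if (new_size <= kSmallCapacity) return kSmallCapacity;           // inline storage, no heap buffer
      if (new_size < cur_size && new_size < cur_cap / 2)
        return AlignUp16(new_size + 1) - 1;                            // shrink the heap buffer (assumed)
      if (new_size > cur_cap * 2) return AlignUp16(new_size + 1) - 1;  // big jump: size-driven capacity
      if (new_size > cur_cap) return cur_cap * 2;                      // ordinary growth: double
      return cur_cap;                                                  // already fits
    }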
|
/external/pdfium/third_party/base/allocator/partition_allocator/ |
D | partition_alloc.cc |
    301  size_t new_size = internal::PartitionBucket::get_direct_map_size(raw_size);    in PartitionReallocDirectMappedInPlace() local
    302  if (new_size < kGenericMinDirectMappedDownsize)    in PartitionReallocDirectMappedInPlace()
    308  if (new_size == current_size) {    in PartitionReallocDirectMappedInPlace()
    310  } else if (new_size < current_size) {    in PartitionReallocDirectMappedInPlace()
    316  if ((new_size / kSystemPageSize) * 5 < (map_size / kSystemPageSize) * 4)    in PartitionReallocDirectMappedInPlace()
    320  size_t decommit_size = current_size - new_size;    in PartitionReallocDirectMappedInPlace()
    321  root->DecommitSystemPages(char_ptr + new_size, decommit_size);    in PartitionReallocDirectMappedInPlace()
    322  SetSystemPagesAccess(char_ptr + new_size, decommit_size, PageInaccessible);    in PartitionReallocDirectMappedInPlace()
    323  } else if (new_size <=    in PartitionReallocDirectMappedInPlace()
    327  size_t recommit_size = new_size - current_size;    in PartitionReallocDirectMappedInPlace()
    [all …]
|
/external/tensorflow/tensorflow/core/lib/random/ |
D | weighted_picker.cc |
    182  void WeightedPicker::Resize(int new_size) {    in Resize() argument
    183  CHECK_GE(new_size, 0);    in Resize()
    184  if (new_size <= LevelSize(num_levels_ - 1)) {    in Resize()
    189  for (int i = new_size; i < N_; i++) {    in Resize()
    195  N_ = new_size;    in Resize()
    202  assert(new_size > N_);    in Resize()
    203  WeightedPicker new_picker(new_size);    in Resize()
    207  memset(dst + N_, 0, sizeof(dst[0]) * (new_size - N_));    in Resize()
    214  assert(this->N_ == new_size);    in Resize()
|
/external/oss-fuzz/projects/libwebp/ |
D | fuzz_advanced_api.c |
    73   size_t new_size = value + 1;    in LLVMFuzzerTestOneInput() local
    75   if (new_data + new_size > data + size) {    in LLVMFuzzerTestOneInput()
    76   new_size = data + size - new_data;    in LLVMFuzzerTestOneInput()
    78   status = WebPIAppend(idec, new_data, new_size);    in LLVMFuzzerTestOneInput()
    79   if (status != VP8_STATUS_SUSPENDED || new_size == 0) break;    in LLVMFuzzerTestOneInput()
    80   new_data += new_size;    in LLVMFuzzerTestOneInput()
    81   new_size *= 2;    in LLVMFuzzerTestOneInput()
|
/external/virglrenderer/src/gallium/auxiliary/util/ |
D | u_bitmask.c |
    93   unsigned new_size;    in util_bitmask_resize() local
    104  new_size = bm->size;    in util_bitmask_resize()
    105  while(new_size < minimum_size) {    in util_bitmask_resize()
    106  new_size *= 2;    in util_bitmask_resize()
    108  if(new_size < bm->size)    in util_bitmask_resize()
    111  assert(new_size);    in util_bitmask_resize()
    112  assert(new_size % UTIL_BITMASK_BITS_PER_WORD == 0);    in util_bitmask_resize()
    116  new_size / UTIL_BITMASK_BITS_PER_BYTE);    in util_bitmask_resize()
    122  (new_size - bm->size)/UTIL_BITMASK_BITS_PER_BYTE);    in util_bitmask_resize()
    124  bm->size = new_size;    in util_bitmask_resize()
|
/external/mesa3d/src/gallium/auxiliary/util/ |
D | u_bitmask.c |
    94   unsigned new_size;    in util_bitmask_resize() local
    105  new_size = bm->size;    in util_bitmask_resize()
    106  while (new_size < minimum_size) {    in util_bitmask_resize()
    107  new_size *= 2;    in util_bitmask_resize()
    109  if (new_size < bm->size)    in util_bitmask_resize()
    112  assert(new_size);    in util_bitmask_resize()
    113  assert(new_size % UTIL_BITMASK_BITS_PER_WORD == 0);    in util_bitmask_resize()
    118  new_size / UTIL_BITMASK_BITS_PER_BYTE);    in util_bitmask_resize()
    124  (new_size - bm->size)/UTIL_BITMASK_BITS_PER_BYTE);    in util_bitmask_resize()
    126  bm->size = new_size;    in util_bitmask_resize()
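Both copies of util_bitmask_resize() double the bitmask until it covers the requested minimum and treat unsigned wrap-around of the doubled value as an allocation failure. A compact restatement of that loop:

    // Illustrative only; current_bits mirrors bm->size and is non-zero in the original.
    bool BitmaskNewSize(unsigned current_bits, unsigned minimum_bits, unsigned* out_bits) {
      if (current_bits == 0) return false;
      unsigned new_size = current_bits;
      while (new_size < minimum_bits) {
        new_size *= 2;
        if (new_size < current_bits) return false;  // unsigned wrap-around: doubling overflowed
      }
      *out_bits = new_size;
      return true;
    }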
|
/external/grpc-grpc/tools/profiling/ios_bin/ |
D | binary_size.py |
    94   new_size = get_size('new', frameworks)    variable
    120  '{:,}'.format(new_size[i]), size_labels[i], '')
    124  if abs(new_size[i] - old_size[i]) < diff_threshold:
    126  if new_size[i] > old_size[i]:
    131  text += row_format.format('{:,}'.format(new_size[i]),
    135  if new_size[i] > old_size[i]:
    137  elif new_size[i] < old_size[i]:
    142  '{:,}'.format(new_size[i]), size_labels[i] + diff_sign,
|
/external/perfetto/src/protozero/ |
D | packed_repeated_fields.cc |
    29   size_t new_size = old_size < 65536 ? (old_size * 2) : (old_size * 3 / 2);    in GrowSlowpath() local
    30   new_size = perfetto::base::AlignUp<4096>(new_size);    in GrowSlowpath()
    31   std::unique_ptr<uint8_t[]> new_buf(new uint8_t[new_size]);    in GrowSlowpath()
    35   storage_end_ = storage_begin_ + new_size;    in GrowSlowpath()
|
/external/compiler-rt/lib/sanitizer_common/ |
D | sanitizer_tls_get_addr.cc |
    53   static inline void DTLS_Resize(uptr new_size) {    in DTLS_Resize() argument
    54   if (dtls.dtv_size >= new_size) return;    in DTLS_Resize()
    55   new_size = RoundUpToPowerOfTwo(new_size);    in DTLS_Resize()
    56   new_size = Max(new_size, 4096UL / sizeof(DTLS::DTV));    in DTLS_Resize()
    58   (DTLS::DTV *)MmapOrDie(new_size * sizeof(DTLS::DTV), "DTLS_Resize");    in DTLS_Resize()
    68   dtls.dtv_size = new_size;    in DTLS_Resize()
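DTLS_Resize() only ever grows the DTV, rounds the requested entry count up to a power of two, and enforces a floor of 4096 bytes' worth of entries. A sketch of that sizing rule, with the entry and page sizes passed in rather than taken from sanitizer internals:

    #include <algorithm>
    #include <cstddef>

    std::size_t RoundUpToPowerOfTwo(std::size_t n) {
      std::size_t p = 1;
      while (p < n) p <<= 1;
      return p;
    }

    // entry_bytes must be non-zero; page_bytes plays the role of the 4096UL above.
    std::size_t NextDtvSize(std::size_t current, std::size_t requested,
                            std::size_t entry_bytes, std::size_t page_bytes) {
      if (current >= requested) return current;        // never shrink
      const std::size_t n = RoundUpToPowerOfTwo(requested);
      return std::max(n, page_bytes / entry_bytes);    // at least a page's worth of entries
    }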
|