
Searched refs:outer_size (Results 1 – 25 of 83) sorted by relevance

/external/llvm-libc/src/__support/
block.h
127 LIBC_INLINE size_t outer_size() const { return next_ & SIZE_MASK; } in outer_size() function
129 LIBC_INLINE static size_t outer_size(size_t inner_size) { in outer_size() function
139 return inner_size(outer_size()); in inner_size()
144 LIBC_INLINE static size_t inner_size(size_t outer_size) { in inner_size() argument
146 return inner_size_free(outer_size) + sizeof(prev_); in inner_size()
153 return inner_size_free(outer_size()); in inner_size_free()
158 LIBC_INLINE static size_t inner_size_free(size_t outer_size) { in inner_size_free() argument
159 return outer_size - sizeof(Block); in inner_size_free()
180 return {reinterpret_cast<cpp::byte *>(this), outer_size()}; in region()
202 outer_size()); in next()
[all …]
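
The block.h matches above pin down how outer and inner sizes relate: the outer size spans the whole block including its header, inner_size_free is what remains past the header, and inner_size additionally counts sizeof(prev_), reflecting that a prev_ field is only needed while its neighbouring block is free. A minimal standalone sketch of those relations; the two plain size_t fields and the omission of the SIZE_MASK flag bits are assumptions of the example, not the libc layout:

// Minimal stand-in for the llvm-libc Block referenced above.
#include <cassert>
#include <cstddef>
#include <cstdio>

struct Block {
  size_t prev_;  // outer size of the previous block; only needed while that
                 // block is free, hence the sizeof(prev_) term below
  size_t next_;  // outer size of this block (flag bits omitted in the sketch)

  size_t outer_size() const { return next_; }

  // Usable bytes while the block is free: everything past the header.
  static size_t inner_size_free(size_t outer_size) {
    return outer_size - sizeof(Block);
  }

  // Usable bytes once allocated: the prev_ slot can be handed out as well.
  static size_t inner_size(size_t outer_size) {
    return inner_size_free(outer_size) + sizeof(prev_);
  }
};

int main() {
  const size_t outer = 64;
  std::printf("outer=%zu inner=%zu inner_free=%zu\n", outer,
              Block::inner_size(outer), Block::inner_size_free(outer));
  // The relations visible in the listing: inner = inner_free + sizeof(prev_)
  // and inner_free = outer - sizeof(Block).
  assert(Block::inner_size(outer) ==
         Block::inner_size_free(outer) + sizeof(size_t));
  assert(Block::inner_size_free(outer) == outer - sizeof(Block));
  return 0;
}
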
/external/pigweed/pw_allocator/block/public/pw_allocator/block/
testing.h
80 size_t outer_size; member
173 if (preallocation.outer_size != Preallocation::kSizeRemaining) { in Preallocate()
174 size_t outer_size = in Preallocate() local
175 AlignUp(preallocation.outer_size, BlockType::kAlignment); in Preallocate()
176 PW_ASSERT(outer_size > BlockType::kBlockOverhead); in Preallocate()
177 PW_ASSERT(remaining_outer_size >= outer_size); in Preallocate()
178 remaining_outer_size -= outer_size; in Preallocate()
188 size_t outer_size = preallocation.outer_size; in Preallocate() local
189 if (outer_size == Preallocation::kSizeRemaining) { in Preallocate()
190 outer_size = remaining_outer_size; in Preallocate()
[all …]
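
The testing.h matches sketch how test fixtures carve a region into blocks of requested outer sizes: each size is aligned up, checked against the block overhead and the space left, and a kSizeRemaining sentinel claims whatever is left over. A hedged sketch of that accounting; the constants and helper names below are arbitrary stand-ins, not the pw_allocator API:

#include <cassert>
#include <cstddef>
#include <vector>

constexpr size_t kAlignment = 16;
constexpr size_t kBlockOverhead = 32;
constexpr size_t kSizeRemaining = ~size_t{0};  // sentinel: "take the rest"

size_t AlignUp(size_t n, size_t a) { return (n + a - 1) / a * a; }

std::vector<size_t> CarveOuterSizes(const std::vector<size_t>& requests,
                                    size_t remaining_outer_size) {
  std::vector<size_t> outer_sizes;
  for (size_t request : requests) {
    size_t outer_size = request;
    if (outer_size != kSizeRemaining) {
      outer_size = AlignUp(outer_size, kAlignment);
      assert(outer_size > kBlockOverhead);         // must fit a block header
      assert(remaining_outer_size >= outer_size);  // must fit in the region
      remaining_outer_size -= outer_size;
    } else {
      outer_size = remaining_outer_size;  // sentinel block takes the rest
      remaining_outer_size = 0;
    }
    outer_sizes.push_back(outer_size);
  }
  return outer_sizes;
}

int main() {
  auto sizes = CarveOuterSizes({100, 64, kSizeRemaining}, 512);
  // 100 aligns up to 112, 64 stays 64, the sentinel gets 512 - 112 - 64.
  assert(sizes[0] == 112 && sizes[1] == 64 && sizes[2] == 336);
  return 0;
}
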
detailed_block.h
98 constexpr explicit DetailedBlockImpl(size_t outer_size) : info_{} {
99 next_ = outer_size / Basic::kAlignment;
134 constexpr void SetNext(size_t outer_size, BlockType* next);
218 size_t outer_size = next_;
219 Hardening::Multiply(outer_size, Basic::kAlignment);
220 return outer_size;
240 constexpr void DetailedBlockImpl<Parameters>::SetNext(size_t outer_size,
242 next_ = outer_size / Basic::kAlignment;
253 size_t outer_size = prev_;
254 Hardening::Multiply(outer_size, Basic::kAlignment);
[all …]
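
detailed_block.h never stores the outer size directly: SetNext stores outer_size / kAlignment in next_, and outer_size() multiplies it back through Hardening::Multiply, a checked multiply. Because every size is an alignment multiple, the division is lossless and a narrow field covers a larger range. A sketch of that encoding; the field width and the plain multiply are assumptions of the example:

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr size_t kAlignment = 16;

struct DetailedBlockSketch {
  uint32_t next_ = 0;  // outer size of this block, in units of kAlignment

  void SetNext(size_t outer_size) {
    assert(outer_size % kAlignment == 0);  // sizes are alignment multiples
    next_ = static_cast<uint32_t>(outer_size / kAlignment);
  }
  size_t OuterSize() const {
    size_t outer_size = next_;
    outer_size *= kAlignment;  // Hardening::Multiply is a checked multiply
    return outer_size;
  }
};

int main() {
  DetailedBlockSketch block;
  block.SetNext(4096);
  assert(block.OuterSize() == 4096);
  return 0;
}
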
small_block_base.h
47 constexpr explicit SmallBlockBase(size_t outer_size) in SmallBlockBase()
49 next_and_last_(static_cast<T>(outer_size >> kShift) | 1U) {} in SmallBlockBase()
86 constexpr void SetNext(size_t outer_size, Derived* next) { in SetNext()
87 auto packed_size = static_cast<T>(outer_size >> kShift); in SetNext()
basic.h
149 static constexpr size_t InnerSizeFromOuterSize(size_t outer_size);
262 size_t outer_size = inner_size; in OuterSizeFromInnerSize() local
263 Hardening::Increment(outer_size, kBlockOverhead); in OuterSizeFromInnerSize()
264 return outer_size; in OuterSizeFromInnerSize()
269 size_t outer_size) { in InnerSizeFromOuterSize() argument
270 size_t inner_size = outer_size; in InnerSizeFromOuterSize()
small_block.h
31 constexpr explicit SmallBlock(size_t outer_size) in SmallBlock() argument
32 : SmallBlockBase(outer_size) {} in SmallBlock()
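
basic.h converts between inner and outer sizes by adding or removing kBlockOverhead with hardened arithmetic, while small_block_base.h and small_block.h pack the outer size, shifted down by kShift, into a narrow integer next to flag bits (the | 1U marks the last block). A sketch of the packed form; the type T, kShift, and the flag layout below are assumptions, not the pw_allocator definitions:

#include <cassert>
#include <cstddef>
#include <cstdint>

using T = uint16_t;
constexpr unsigned kShift = 4;
constexpr T kLastMask = 1U;  // low bit of the packed word: "last block"

struct SmallBlockSketch {
  T next_and_last_;

  explicit SmallBlockSketch(size_t outer_size)
      : next_and_last_(static_cast<T>(outer_size >> kShift) | kLastMask) {
    // Assumed invariant: sizes are multiples of 1 << (kShift + 1), so the
    // shifted value's low bit is free to hold the flag.
    assert(outer_size % (size_t{1} << (kShift + 1)) == 0);
  }

  size_t OuterSize() const {
    return static_cast<size_t>(next_and_last_ & ~kLastMask) << kShift;
  }
  bool IsLast() const { return (next_and_last_ & kLastMask) != 0; }
};

int main() {
  SmallBlockSketch block(256);
  assert(block.OuterSize() == 256 && block.IsLast());
  return 0;
}
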
/external/cronet/stable/third_party/llvm-libc/src/src/__support/
block.h
153 LIBC_INLINE size_t outer_size() const { return next_ & SIZE_MASK; } in outer_size() function
155 LIBC_INLINE static size_t outer_size(size_t inner_size) { in outer_size() function
165 return inner_size(outer_size()); in inner_size()
170 LIBC_INLINE static size_t inner_size(size_t outer_size) { in inner_size() argument
172 return inner_size_free(outer_size) + sizeof(prev_); in inner_size()
179 return inner_size_free(outer_size()); in inner_size_free()
184 LIBC_INLINE static size_t inner_size_free(size_t outer_size) { in inner_size_free() argument
185 return outer_size - BLOCK_OVERHEAD; in inner_size_free()
198 return {reinterpret_cast<cpp::byte *>(this), outer_size()}; in region()
219 outer_size()); in next()
[all …]
/external/cronet/tot/third_party/llvm-libc/src/src/__support/
block.h
153 LIBC_INLINE size_t outer_size() const { return next_ & SIZE_MASK; } in outer_size() function
155 LIBC_INLINE static size_t outer_size(size_t inner_size) { in outer_size() function
165 return inner_size(outer_size()); in inner_size()
170 LIBC_INLINE static size_t inner_size(size_t outer_size) { in inner_size() argument
172 return inner_size_free(outer_size) + sizeof(prev_); in inner_size()
179 return inner_size_free(outer_size()); in inner_size_free()
184 LIBC_INLINE static size_t inner_size_free(size_t outer_size) { in inner_size_free() argument
185 return outer_size - BLOCK_OVERHEAD; in inner_size_free()
198 return {reinterpret_cast<cpp::byte *>(this), outer_size()}; in region()
219 outer_size()); in next()
[all …]
/external/llvm-libc/test/src/__support/
block_test.cpp
38 EXPECT_EQ(last->outer_size(), sizeof(Block)); in TEST()
44 EXPECT_EQ(block->outer_size(), block_outer_size); in TEST()
88 size_t orig_size = block1->outer_size(); in TEST()
95 EXPECT_EQ(block1->outer_size(), in TEST()
98 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
114 size_t orig_size = block1->outer_size(); in TEST()
124 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
214 EXPECT_LE(block->outer_size(), sizeof(Block) + alignof(max_align_t)); in TEST()
231 EXPECT_LE((*result)->outer_size(), sizeof(Block) + alignof(max_align_t)); in TEST()
241 size_t orig_size = block->outer_size(); in TEST()
[all …]
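
The split tests above keep asserting one conservation property: after a split, the two outer sizes add back up to the original (EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size())). A toy model of that check, ignoring alignment and headers, which the real Block does handle:

#include <cassert>
#include <cstddef>
#include <utility>

struct FakeBlock {
  size_t outer;
  size_t outer_size() const { return outer; }
};

// Carve `first_outer` bytes off the front, returning the two pieces.
std::pair<FakeBlock, FakeBlock> Split(const FakeBlock& b, size_t first_outer) {
  assert(first_outer < b.outer);
  return {FakeBlock{first_outer}, FakeBlock{b.outer - first_outer}};
}

int main() {
  const FakeBlock block{256};
  const size_t orig_size = block.outer_size();
  auto [block1, block2] = Split(block, 96);
  // Mirrors EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()).
  assert(block1.outer_size() + block2.outer_size() == orig_size);
  return 0;
}
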
/external/tensorflow/tensorflow/core/kernels/
gather_functor_batched_gpu.cu.h
36 ValueOrVec* __restrict__ out, int64 outer_size, in GatherOpKernel() argument
60 batch_i = entries_count / outer_size; in GatherOpKernel()
61 outer_i = entries_count - batch_i * outer_size; in GatherOpKernel()
79 (batch_i * outer_size + outer_i) * gather_dim_size + gather_i in GatherOpKernel()
94 T* out, int64 outer_size, int64 gather_dim_size, in operator()
113 params_vec, indices, out_vec, outer_size, gather_dim_size, in operator()
124 const Index* indices, T* out, int64 outer_size, in LaunchGatherKernel() argument
136 outer_size, gather_dim_size, indices_size, slice_size, out_size); in LaunchGatherKernel()
157 const int64 outer_size = params.dimension(1);
169 outer_size, gather_dim_size, indices_size, slice_size,
gather_functor_batched.h
45 const SliceIndex outer_size = static_cast<SliceIndex>(params.dimension(1)); in HandleCopiesBatched() local
62 const int64_t r_start = start % (outer_size * indices_size); in HandleCopiesBatched()
64 start / (outer_size * indices_size)); in HandleCopiesBatched()
77 if (++o_next >= outer_size) { in HandleCopiesBatched()
123 batch_size * outer_size * indices_size, slice_elems * sizeof(T), work); in HandleCopiesBatched()
138 const int64_t outer_size = params.dimension(1); in operator() local
143 batch_size * outer_size * indices_size * slice_size > in operator()
gather_op.cc
138 int64_t outer_size = 1; in Compute() local
147 outer_size *= params.dim_size(i); in Compute()
166 {batch_size, outer_size, gather_dim_size, inner_size}); in Compute()
168 {batch_size, outer_size, N / batch_size, inner_size}); in Compute()
174 params.shaped<T, 3>({outer_size, gather_dim_size, inner_size}); in Compute()
175 auto out_flat = out->shaped<T, 3>({outer_size, N, inner_size}); in Compute()
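
In the TensorFlow gather kernels above, outer_size is one leg of flattening an N-D tensor around the gather axis into the 4-D view {batch_size, outer_size, gather_dim_size, inner_size}. A sketch of that flattening arithmetic; the shape, batch_dims, and axis values are made up for the example:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  const std::vector<int64_t> dims = {2, 3, 4, 5, 6};  // example tensor shape
  const int batch_dims = 1;                           // leading batch dims
  const int axis = 3;                                 // gather dimension

  int64_t batch_size = 1, outer_size = 1, inner_size = 1;
  for (int i = 0; i < batch_dims; ++i) batch_size *= dims[i];
  for (int i = batch_dims; i < axis; ++i) outer_size *= dims[i];
  const int64_t gather_dim_size = dims[axis];
  for (size_t i = axis + 1; i < dims.size(); ++i) inner_size *= dims[i];

  // The kernel's offset then starts from
  // (batch_i * outer_size + outer_i) * gather_dim_size + gather_i,
  // as in gather_functor_batched_gpu.cu.h above.
  std::printf("batch=%lld outer=%lld gather_dim=%lld inner=%lld\n",
              (long long)batch_size, (long long)outer_size,
              (long long)gather_dim_size, (long long)inner_size);
  return 0;
}
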
/external/cronet/tot/third_party/llvm-libc/src/test/src/__support/
block_test.cpp
34 EXPECT_EQ(last->outer_size(), last_outer_size); in TEST()
38 EXPECT_EQ(block->outer_size(), kN - last_outer_size); in TEST()
73 size_t orig_size = block1->outer_size(); in TEST()
80 EXPECT_EQ(block1->outer_size(), in TEST()
83 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
97 size_t orig_size = block1->outer_size(); in TEST()
113 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
244 size_t orig_size = block->outer_size(); in TEST()
248 EXPECT_EQ(block->outer_size(), orig_size); in TEST()
281 size_t orig_size = block1->outer_size(); in TEST()
[all …]
/external/cronet/stable/third_party/llvm-libc/src/test/src/__support/
block_test.cpp
34 EXPECT_EQ(last->outer_size(), last_outer_size); in TEST()
38 EXPECT_EQ(block->outer_size(), kN - last_outer_size); in TEST()
73 size_t orig_size = block1->outer_size(); in TEST()
80 EXPECT_EQ(block1->outer_size(), in TEST()
83 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
97 size_t orig_size = block1->outer_size(); in TEST()
113 EXPECT_EQ(block2->outer_size(), orig_size - block1->outer_size()); in TEST()
244 size_t orig_size = block->outer_size(); in TEST()
248 EXPECT_EQ(block->outer_size(), orig_size); in TEST()
281 size_t orig_size = block1->outer_size(); in TEST()
[all …]
/external/pigweed/pw_allocator/
buddy_allocator.cc
27 BuddyBlock::BuddyBlock(size_t outer_size) { in BuddyBlock() argument
28 outer_size_log2_ = cpp20::countr_zero(outer_size); in BuddyBlock()
120 size_t outer_size = BuddyBlock::OuterSizeFromInnerSize(inner_size); in Allocate() local
133 void* ptr = Allocate(layout.Extend(outer_size)); in Allocate()
155 size_t outer_size = in Deallocate() local
157 if (outer_size < block->OuterSize()) { in Deallocate()
166 item += outer_size; in Deallocate()
168 item -= outer_size; in Deallocate()
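
buddy_allocator.cc stores only log2 of the outer size (cpp20::countr_zero), which is enough because a buddy allocator deals exclusively in power-of-two block sizes; Deallocate then steps by outer_size to reach the buddy block. A sketch of the size encoding using C++20 <bit>; the real BuddyBlock's other fields are omitted here:

#include <bit>
#include <cassert>
#include <cstddef>
#include <cstdint>

struct BuddyBlockSketch {
  uint8_t outer_size_log2_;

  explicit BuddyBlockSketch(size_t outer_size)
      : outer_size_log2_(static_cast<uint8_t>(std::countr_zero(outer_size))) {
    assert(std::has_single_bit(outer_size));  // must be a power of two
  }
  size_t OuterSize() const { return size_t{1} << outer_size_log2_; }
};

int main() {
  BuddyBlockSketch block(4096);
  assert(block.OuterSize() == 4096);
  return 0;
}
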
/external/tensorflow/tensorflow/lite/kernels/internal/reference/
gather.h
57 int outer_size = 1; in Gather() local
59 outer_size *= input_shape.Dims(i); in Gather()
73 for (int outer = 0; outer < outer_size; ++outer) { in Gather()
80 (((batch * outer_size) + outer) * coord_size + i) * inner_size, in Gather()
81 input_data + (((batch * outer_size) + outer) * axis_size + in Gather()
concatenation.h
51 int64_t outer_size = 1; in Concatenation() local
53 outer_size *= output_shape.Dims(i); in Concatenation()
63 for (int k = 0; k < outer_size; k++) { in Concatenation()
102 int64_t outer_size = 1; in ConcatenationWithScaling() local
104 outer_size *= output_shape.Dims(i); in ConcatenationWithScaling()
115 for (int k = 0; k < outer_size; k++) { in ConcatenationWithScaling()
log_softmax.h
32 const int outer_size = in LogSoftmax() local
37 for (int i = 0; i < outer_size; ++i) { in LogSoftmax()
84 const int outer_size = in LogSoftmax() local
89 for (int i = 0; i < outer_size; ++i) { in LogSoftmax()
155 const size_t outer_size, const size_t depth, in LogSoftmaxQuantized() argument
179 for (size_t outer_index = 0; outer_index < outer_size; ++outer_index) { in LogSoftmaxQuantized()
245 inline void LogSoftmax(const SoftmaxParams& params, const size_t outer_size, in LogSoftmax() argument
249 LogSoftmaxQuantized(params, outer_size, depth, input_shape, input_data, in LogSoftmax()
l2normalization.h
35 const int outer_size = variable
39 for (int i = 0; i < outer_size; ++i) {
61 const int outer_size = in L2Normalization() local
65 for (int i = 0; i < outer_size; ++i) { in L2Normalization()
reference_ops.h
343 int outer_size = 1; in Pack() local
345 outer_size *= output_shape.Dims(i); in Pack()
351 TFLITE_DCHECK_EQ((**input_shapes).FlatSize(), copy_size * outer_size); in Pack()
354 for (int k = 0; k < outer_size; k++) { in Pack()
370 int outer_size = 1; in Unpack() local
378 outer_size *= input_shape.Dims(i); in Unpack()
384 TFLITE_DCHECK_EQ(output_shape.FlatSize(), copy_size * outer_size); in Unpack()
387 for (int k = 0; k < outer_size; k++) { in Unpack()
409 int outer_size = 1; in PackWithScaling() local
411 outer_size *= output_shape.Dims(i); in PackWithScaling()
[all …]
softmax.h
35 const int outer_size = in Softmax() local
40 for (int i = 0; i < outer_size; ++i) { in Softmax()
88 const int outer_size = in Softmax() local
93 for (int i = 0; i < outer_size; ++i) { in Softmax()
172 const int outer_size = in SoftmaxInt16() local
177 for (int i = 0; i < outer_size; ++i) { in SoftmaxInt16()
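
Across the TFLite reference kernels above (gather, concatenation, softmax, log-softmax, L2 normalization, pack/unpack), outer_size is the same idiom: multiply every dimension before the processed axis, then loop that many times over contiguous inner slices. A standalone sketch of the pattern, with RuntimeShape replaced by a plain vector and an example shape:

#include <cstdio>
#include <vector>

// Product of dims[0..axis): what the reference kernels call outer_size.
int FlattenOuterSize(const std::vector<int>& dims, int axis) {
  int outer_size = 1;
  for (int i = 0; i < axis; ++i) outer_size *= dims[i];
  return outer_size;
}

int main() {
  const std::vector<int> dims = {2, 3, 5};  // e.g. [batch, rows, depth]
  const int axis = static_cast<int>(dims.size()) - 1;  // softmax-style: last dim
  const int depth = dims[axis];
  const int outer_size = FlattenOuterSize(dims, axis);

  // The kernels then walk outer_size contiguous slices of `depth` elements.
  std::vector<float> data(static_cast<size_t>(outer_size) * depth, 1.0f);
  float total = 0.0f;
  for (int i = 0; i < outer_size; ++i)
    for (int d = 0; d < depth; ++d) total += data[i * depth + d];

  std::printf("outer_size=%d depth=%d sum=%g\n", outer_size, depth, total);
  return 0;
}
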
/external/pytorch/aten/src/ATen/native/cpu/
SoftMaxKernel.cpp
37 int64_t outer_size, in _vec_log_softmax_lastdim() argument
47 int64_t CHUNK_SIZE = std::min<int64_t>(MAX_CHUNK_SIZE, outer_size); in _vec_log_softmax_lastdim()
54 parallel_for(0, outer_size, 0, [&](int64_t begin, int64_t end) { in _vec_log_softmax_lastdim()
117 int64_t outer_size, in _vec_softmax_lastdim() argument
121 parallel_for(0, outer_size, 0, [&](int64_t begin, int64_t end) { in _vec_softmax_lastdim()
151 int64_t outer_size, in _vec_softmax_lastdim() argument
156 parallel_for(0, outer_size, 0, [&](int64_t begin, int64_t end) { in _vec_softmax_lastdim()
217 int64_t outer_size, in _vec_host_softmax_backward_lastdim() argument
223 outer_size, in _vec_host_softmax_backward_lastdim()
268 int64_t outer_size, in _vec_softmax_backward() argument
[all …]
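
The ATen CPU softmax kernels above parallelize over outer_size rows (parallel_for(0, outer_size, ...)) and bound their per-task chunk by min(MAX_CHUNK_SIZE, outer_size). A rough sketch of splitting the rows into ranges, with std::thread standing in for ATen's thread pool; the grain computation here is an assumption, not ATen's scheduling policy:

#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <thread>
#include <vector>

int main() {
  const int64_t outer_size = 10;  // rows = product of all dims but the last
  const int64_t num_workers =
      std::max<int64_t>(1, std::thread::hardware_concurrency());
  const int64_t grain = (outer_size + num_workers - 1) / num_workers;

  std::vector<std::thread> workers;
  for (int64_t begin = 0; begin < outer_size; begin += grain) {
    const int64_t end = std::min(begin + grain, outer_size);
    workers.emplace_back([begin, end] {
      for (int64_t row = begin; row < end; ++row) {
        // each row is one contiguous last-dimension slice to (log-)softmax
      }
      std::printf("worker handled rows [%lld, %lld)\n",
                  (long long)begin, (long long)end);
    });
  }
  for (auto& t : workers) t.join();
  return 0;
}
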
/external/perfetto/src/base/
paged_memory.cc
57 size_t outer_size = rounded_up_size + GuardSize() * 2; in Allocate() local
59 void* ptr = VirtualAlloc(nullptr, outer_size, MEM_RESERVE, PAGE_NOACCESS); in Allocate()
65 void* ptr = mmap(nullptr, outer_size, PROT_READ | PROT_WRITE, in Allocate()
114 const size_t outer_size = RoundUpToSysPageSize(size_) + GuardSize() * 2; in ~PagedMemory() local
115 int res = munmap(start, outer_size); in ~PagedMemory()
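
paged_memory.cc reserves outer_size = the page-rounded usable size plus one guard region on each side, and the destructor recomputes the same outer_size to unmap the full reservation. A sketch of that arithmetic; the 4 KiB page size and one-page guard are assumptions of the example:

#include <cassert>
#include <cstddef>

constexpr size_t kPageSize = 4096;
constexpr size_t GuardSize() { return kPageSize; }
constexpr size_t RoundUpToSysPageSize(size_t n) {
  return (n + kPageSize - 1) & ~(kPageSize - 1);
}

int main() {
  const size_t requested = 10000;
  const size_t rounded_up_size = RoundUpToSysPageSize(requested);  // 12288
  const size_t outer_size = rounded_up_size + GuardSize() * 2;     // + guards
  assert(outer_size == 12288 + 2 * 4096);
  // The usable region starts one GuardSize() past the reservation's base, and
  // teardown must unmap all outer_size bytes from that base.
  return 0;
}
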
/external/ComputeLibrary/tests/validation/reference/
Select.cpp
63 const int outer_size = c.num_elements(); in select() local
64 const int inner_size = x.num_elements() / outer_size; in select()
67 for(int i = 0; i < outer_size; ++i) in select()
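
In Select.cpp above, the condition tensor supplies one element per outer slice, so outer_size is the condition's element count and inner_size is how many x/y elements each condition value selects. A scalar sketch of that selection with made-up data:

#include <cstdio>
#include <vector>

int main() {
  const std::vector<bool>  c = {true, false, true};   // condition, per slice
  const std::vector<float> x = {1, 1, 2, 2, 3, 3};    // chosen where c is true
  const std::vector<float> y = {9, 9, 8, 8, 7, 7};    // chosen where c is false
  std::vector<float> out(x.size());

  const int outer_size = static_cast<int>(c.size());
  const int inner_size = static_cast<int>(x.size()) / outer_size;
  for (int i = 0; i < outer_size; ++i)
    for (int j = 0; j < inner_size; ++j)
      out[i * inner_size + j] =
          c[i] ? x[i * inner_size + j] : y[i * inner_size + j];

  for (float v : out) std::printf("%g ", v);  // prints: 1 1 8 8 3 3
  std::printf("\n");
  return 0;
}
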
/external/pytorch/aten/src/ATen/native/cuda/
SoftMax.cu
106 uint64_t outer_size, uint64_t inner_size) { in SpatialSoftMax_getGridSize() argument
113 if (outer_blocks > outer_size) in SpatialSoftMax_getGridSize()
114 outer_blocks = outer_size; in SpatialSoftMax_getGridSize()
137 uint64_t outer_size, uint64_t dim_size, uint64_t inner_size, in SpatialSoftMax_getLaunchSizes() argument
154 grid = SpatialSoftMax_getGridSize(block, max_active_blocks, outer_size, inner_size); in SpatialSoftMax_getLaunchSizes()
257 index_t outer_size, index_t dim_size, index_t inner_size) in cunn_SpatialSoftMaxForward() argument
264 for (index_t outer_index = blockIdx.x; outer_index < outer_size; outer_index += gridDim.x) { in cunn_SpatialSoftMaxForward()
315 uint32_t outer_size, uint32_t dim_size, uint32_t inner_size) in cunn_SpatialSoftMaxBackward() argument
322 for (uint32_t outer_index = blockIdx.x; outer_index < outer_size; outer_index += gridDim.x) { in cunn_SpatialSoftMaxBackward()
829 int64_t outer_size = 1; in host_softmax() local
[all …]
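
The CUDA spatial softmax code above views the tensor as three flattened extents: outer_size (dimensions before the softmax dim), dim_size, and inner_size (dimensions after it); the grid walks outer and inner indices while each reduction runs over dim_size. A host-side sketch of that decomposition and the offset it implies; the shape and dim are example values:

#include <cstdint>
#include <cstdio>
#include <vector>

int main() {
  const std::vector<int64_t> sizes = {2, 3, 4, 5};  // example tensor shape
  const int dim = 2;                                // softmax dimension

  int64_t outer_size = 1, inner_size = 1;
  const int64_t dim_size = sizes[dim];
  for (int i = 0; i < dim; ++i) outer_size *= sizes[i];
  for (size_t i = dim + 1; i < sizes.size(); ++i) inner_size *= sizes[i];

  // Row-major offset of element (outer, d, inner): the softmax over d strides
  // by inner_size through memory.
  auto offset = [&](int64_t o, int64_t d, int64_t in) {
    return (o * dim_size + d) * inner_size + in;
  };

  std::printf("outer=%lld dim=%lld inner=%lld offset(1,2,3)=%lld\n",
              (long long)outer_size, (long long)dim_size,
              (long long)inner_size, (long long)offset(1, 2, 3));
  return 0;
}
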
