
Searched refs:BitSize (Results 1 – 10 of 10) sorted by relevance

/art/runtime/base/
bit_vector-inl.h:33   DCHECK_LT(bit_index_, BitSize());
bit_vector-inl.h:38   DCHECK_LT(bit_index_, BitSize());
bit_vector-inl.h:50   DCHECK_LE(start_index, BitSize());   in FindIndex()
bit_vector-inl.h:61   return BitSize();   in FindIndex()
bit_field_test.cc:31  ASSERT_EQ(8, TestBitFields::BitSize());   in TEST()
bit_vector.h:66       return bit_index_ == BitSize();   in Done()
bit_vector.h:81       bit_index_(BitSize()) { }   in IndexIterator()
bit_vector.h:83       uint32_t BitSize() const {   in BitSize() function
bit_field.h:64        static int BitSize() {   in BitSize() function
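
Note: two different BitSize() owners appear in base/. BitField exposes a static width query (bit_field.h:64, exercised by bit_field_test.cc:31), while BitVector's IndexIterator uses the instance bit count as its end sentinel (bit_vector.h:66 and 81). A minimal sketch of the BitField side, assuming a BitField<T, position, size> template; the Encode/Decode helpers and every value in main() are illustrative assumptions, not ART's actual code:

#include <cassert>
#include <cstdint>

template <typename T, int kPosition, int kSize>
class BitField {
 public:
  // Width of the field in bits, matching the bit_field.h:64 hit.
  static int BitSize() { return kSize; }

  // Shift a value into its bit slot within a word (assumed helper).
  static uint32_t Encode(T value) {
    return static_cast<uint32_t>(value) << kPosition;
  }

  // Extract the value back out of a word (assumed helper).
  static T Decode(uint32_t word) {
    return static_cast<T>((word >> kPosition) & ((1u << kSize) - 1u));
  }
};

int main() {
  // Mirrors ASSERT_EQ(8, TestBitFields::BitSize()) from bit_field_test.cc:31;
  // the position/size parameters here are arbitrary.
  using TestBitFields = BitField<uint32_t, /*kPosition=*/4, /*kSize=*/8>;
  assert(TestBitFields::BitSize() == 8);
  assert(TestBitFields::Decode(TestBitFields::Encode(0x5Au)) == 0x5Au);
  return 0;
}
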
/art/runtime/
stack_map.h:668    DCHECK_LE(BitSize(), 32u);   in start_offset_()
stack_map.h:671    ALWAYS_INLINE size_t BitSize() const { return end_offset_ - start_offset_; }   in BitSize() function
stack_map.h:676    return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;   in Load()
stack_map.h:681    region.StoreBits(start_offset_, value - min_value_, BitSize());   in Store()
stack_map.h:756    ALWAYS_INLINE size_t BitSize() const {   in BitSize() function
stack_map.h:934    ALWAYS_INLINE size_t BitSize() const {   in BitSize() function
stack_map.h:1068   size_t entry_size = encoding.BitSize();   in GetRegionAtDepth()
stack_map.h:1081   ALWAYS_INLINE size_t BitSize() const {   in BitSize() function
stack_map.h:1123   *offset += encoding.BitSize() * num_entries;   in UpdateBitOffset()
stack_map.h:1130   const size_t map_size = encoding.BitSize();   in BitRegion()
[all …]
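
Note: stack_map.h lines 668–681 quote a complete pattern: a field encoding whose BitSize() is the distance between two bit offsets, and whose Load/Store bias values by min_value_ so a small signed range packs into few bits. A self-contained sketch of that pattern follows; BitBuffer is an illustrative stand-in for ART's real bit-region type, and the constants in main() are made up.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <vector>

// Assumed stand-in for a bit-addressable region with LoadBits/StoreBits.
class BitBuffer {
 public:
  explicit BitBuffer(size_t bits) : bits_((bits + 7) / 8, 0) {}

  uint32_t LoadBits(size_t offset, size_t count) const {
    uint32_t value = 0;
    for (size_t i = 0; i < count; ++i) {
      value |= static_cast<uint32_t>(LoadBit(offset + i)) << i;
    }
    return value;
  }

  void StoreBits(size_t offset, uint32_t value, size_t count) {
    for (size_t i = 0; i < count; ++i) {
      StoreBit(offset + i, (value >> i) & 1u);
    }
  }

 private:
  bool LoadBit(size_t i) const { return (bits_[i / 8] >> (i % 8)) & 1u; }
  void StoreBit(size_t i, bool b) {
    if (b) bits_[i / 8] |= (1u << (i % 8));
    else   bits_[i / 8] &= ~(1u << (i % 8));
  }
  std::vector<uint8_t> bits_;
};

// Sketch of the FieldEncoding pattern the hits above suggest: BitSize() is
// derived from the [start, end) bit offsets, and Load/Store shift values by
// min_value_ before packing them.
class FieldEncoding {
 public:
  FieldEncoding(size_t start, size_t end, int32_t min_value)
      : start_offset_(start), end_offset_(end), min_value_(min_value) {
    assert(BitSize() <= 32u);  // mirrors DCHECK_LE(BitSize(), 32u) at line 668
  }

  size_t BitSize() const { return end_offset_ - start_offset_; }

  int32_t Load(const BitBuffer& region) const {
    return static_cast<int32_t>(region.LoadBits(start_offset_, BitSize())) + min_value_;
  }

  void Store(BitBuffer& region, int32_t value) const {
    region.StoreBits(start_offset_, value - min_value_, BitSize());
  }

 private:
  size_t start_offset_;
  size_t end_offset_;
  int32_t min_value_;
};

int main() {
  BitBuffer region(64);
  FieldEncoding dex_pc(/*start=*/3, /*end=*/11, /*min_value=*/-1);  // 8-bit field
  dex_pc.Store(region, 42);
  assert(dex_pc.Load(region) == 42);  // value round-trips through the bias
  return 0;
}
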
stack_map.cc:205   << " [entry_size=0x" << encoding.stack_map.encoding.BitSize() << " bits]"   in Dump()
stack_map.cc:214   for (size_t i = 0, e = encoding.stack_mask.encoding.BitSize(); i < e; ++i) {   in Dump()
quick_exception_handler.cc:443   if (bit < encoding.stack_mask.encoding.BitSize() && stack_mask.LoadBit(bit)) {   in HandleOptimizingDeoptimization()
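
Note: the stack_map.cc and quick_exception_handler.cc hits share one guarded read: a stack-mask bit counts only if its index is below the encoded BitSize() and the bit itself is set. A trivial sketch, with std::vector<bool> standing in for the bit region and IsReferenceSlot a hypothetical name:

#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical helper mirroring the condition at quick_exception_handler.cc:443:
// a slot is live only if its bit index lies inside the encoded mask AND that
// bit is set; out-of-range indices read as clear.
bool IsReferenceSlot(const std::vector<bool>& stack_mask,
                     size_t encoded_bit_size,  // encoding.BitSize()
                     size_t bit) {
  return bit < encoded_bit_size && stack_mask[bit];
}

int main() {
  std::vector<bool> mask = {true, false, true};
  assert(IsReferenceSlot(mask, mask.size(), 0));
  assert(!IsReferenceSlot(mask, mask.size(), 1));
  assert(!IsReferenceSlot(mask, mask.size(), 7));  // past the mask: clear
  return 0;
}
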
/art/oatdump/
oatdump.cc:1669   stack_map_encoding.GetNativePcEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1672   stack_map_encoding.GetDexPcEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1675   stack_map_encoding.GetDexRegisterMapEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1678   stack_map_encoding.GetInlineInfoEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1681   stack_map_encoding.GetRegisterMaskIndexEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1684   stack_map_encoding.GetStackMaskIndexEncoding().BitSize() * num_stack_maps);   in DumpCode()
oatdump.cc:1689   encoding.stack_mask.encoding.BitSize() * encoding.stack_mask.num_entries);   in DumpCode()
oatdump.cc:1694   encoding.register_mask.encoding.BitSize() * encoding.register_mask.num_entries);   in DumpCode()
oatdump.cc:1700   encoding.invoke_info.encoding.BitSize() * encoding.invoke_info.num_entries);   in DumpCode()
oatdump.cc:1720   encoding.inline_info.encoding.GetMethodIndexIdxEncoding().BitSize() *   in DumpCode()
[all …]
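
Note: every oatdump.cc hit computes the same product: the bits consumed by an encoded stream equal its per-entry BitSize() times the entry count. A sketch of that accounting; the EncodedStream struct and all widths/counts are made-up example inputs, not values read from a real oat file:

#include <cstddef>
#include <cstdio>

// Assumed shape only: each encoded stream contributes BitSize() bits per
// entry, so the dump's size figures are simple products.
struct EncodedStream {
  const char* name;
  size_t bit_size;     // per-entry width, i.e. encoding.BitSize()
  size_t num_entries;  // e.g. num_stack_maps
};

int main() {
  // Made-up example inputs.
  const EncodedStream streams[] = {
      {"native_pc", 11, 96},
      {"dex_pc", 9, 96},
      {"stack_mask_index", 5, 96},
  };
  size_t total_bits = 0;
  for (const EncodedStream& s : streams) {
    const size_t bits = s.bit_size * s.num_entries;  // BitSize() * num_entries
    std::printf("%-16s %zu bits\n", s.name, bits);
    total_bits += bits;
  }
  std::printf("%-16s %zu bits (%zu bytes)\n", "total", total_bits,
              (total_bits + 7) / 8);
  return 0;
}
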
/art/compiler/optimizing/
stack_map_stream.cc:411   } else if (encoding.stack_map.encoding.GetInlineInfoEncoding().BitSize() > 0) {   in FillInCodeInfo()
stack_map_stream.cc:417   const size_t stack_mask_bits = encoding.stack_mask.encoding.BitSize();   in FillInCodeInfo()
stack_map_stream.cc:432   register_mask.StoreBits(0, register_masks_[i], encoding.register_mask.encoding.BitSize());   in FillInCodeInfo()
stack_map_test.cc:35      if (bit_vector.GetNumberOfBits() > encoding.stack_mask.encoding.BitSize()) {   in CheckStackMask()
stack_map_test.cc:38      for (size_t i = 0; i < encoding.stack_mask.encoding.BitSize(); ++i) {   in CheckStackMask()
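
Note: stack_map_test.cc's CheckStackMask() compares a source BitVector against the encoded stack mask bit by bit, and per the guard at line 35 tolerates a width mismatch only when the out-of-range bits are clear. A hedged reconstruction using plain std::vector<bool> stand-ins for both containers:

#include <cassert>
#include <cstddef>
#include <vector>

// Hedged reconstruction of the check quoted above: every set bit in the
// source vector must survive encoding, and the encoding must not invent
// extra set bits.
bool CheckStackMask(const std::vector<bool>& bit_vector,    // source mask
                    const std::vector<bool>& stack_mask) {  // encoded mask
  const size_t encoded_bits = stack_mask.size();            // encoding.BitSize()
  // A narrower encoding is acceptable only if the dropped tail is all clear
  // (the guard at stack_map_test.cc:35).
  for (size_t i = encoded_bits; i < bit_vector.size(); ++i) {
    if (bit_vector[i]) return false;
  }
  // Within the encoded width, every bit must round-trip exactly
  // (the loop at stack_map_test.cc:38).
  for (size_t i = 0; i < encoded_bits; ++i) {
    const bool expected = i < bit_vector.size() && bit_vector[i];
    if (stack_mask[i] != expected) return false;
  }
  return true;
}

int main() {
  assert(CheckStackMask({true, false, true}, {true, false, true, false}));  // widening: ok
  assert(!CheckStackMask({true, false, true, false, true},
                         {true, false, true, false}));  // set bit dropped: fail
  return 0;
}
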