Searched refs:byte (Results 1 – 25 of 111) sorted by relevance

/art/runtime/gc/accounting/
card_table.h
54 static CardTable* Create(const byte* heap_begin, size_t heap_capacity);
67 byte GetCard(const mirror::Object* obj) const { in GetCard()
74 byte* card_start = CardFromAddr(start); in VisitClear()
75 byte* card_end = CardFromAddr(end); in VisitClear()
76 for (byte* it = card_start; it != card_end; ++it) { in VisitClear()
86 byte* GetBiasedBegin() const { in GetBiasedBegin()
99 void ModifyCardsAtomic(byte* scan_begin, byte* scan_end, const Visitor& visitor,
105 size_t Scan(SpaceBitmap<kObjectAlignment>* bitmap, byte* scan_begin, byte* scan_end,
107 const byte minimum_age = kCardDirty) const
112 void CheckAddrIsInCardTable(const byte* addr) const;
[all …]
card_table_test.cc
50 CardTableTest() : heap_begin_(reinterpret_cast<byte*>(0x2000000)), heap_size_(2 * MB) { in CardTableTest()
55 byte* HeapBegin() const { in HeapBegin()
58 byte* HeapLimit() const { in HeapLimit()
61 byte PRandCard(const byte* addr) const { in PRandCard()
66 for (const byte* addr = HeapBegin(); addr != HeapLimit(); addr += kCardSize) { in FillRandom()
68 byte* card = card_table_->CardFromAddr(addr); in FillRandom()
74 byte* const heap_begin_;
80 for (const byte* addr = HeapBegin(); addr < HeapLimit(); addr += kObjectAlignment) { in TEST_F()
87 byte* card_addr = card_table_->CardFromAddr(addr); in TEST_F()
96 byte operator()(byte c) const { in operator ()()
[all …]
card_table-inl.h
30 static inline bool byte_cas(byte old_value, byte new_value, byte* address) { in byte_cas()
32 Atomic<byte>* byte_atomic = reinterpret_cast<Atomic<byte>*>(address); in byte_cas()
52 inline size_t CardTable::Scan(ContinuousSpaceBitmap* bitmap, byte* scan_begin, byte* scan_end, in Scan()
53 const Visitor& visitor, const byte minimum_age) const { in Scan()
54 DCHECK_GE(scan_begin, reinterpret_cast<byte*>(bitmap->HeapBegin())); in Scan()
56 DCHECK_LE(scan_end, reinterpret_cast<byte*>(bitmap->HeapLimit())); in Scan()
57 byte* card_cur = CardFromAddr(scan_begin); in Scan()
58 byte* card_end = CardFromAddr(AlignUp(scan_end, kCardSize)); in Scan()
73 byte* aligned_end = card_end - in Scan()
88 uintptr_t start = reinterpret_cast<uintptr_t>(AddrFromCard(reinterpret_cast<byte*>(word_cur))); in Scan()
[all …]
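
The byte_cas() hits above show how a single card byte is updated atomically: the raw byte pointer is reinterpreted as an atomic byte and updated with compare-and-swap. A minimal sketch of the same pattern using std::atomic<uint8_t> (the ByteCas name and the relaxed ordering are illustrative, not ART's exact code):

#include <atomic>
#include <cstdint>

// Sketch only: ART uses its own Atomic<> wrapper. Viewing a plain uint8_t*
// as std::atomic<uint8_t>* is assumed safe here because the card bytes are
// only ever touched through helpers like this one.
inline bool ByteCas(uint8_t old_value, uint8_t new_value, uint8_t* address) {
  auto* byte_atomic = reinterpret_cast<std::atomic<uint8_t>*>(address);
  return byte_atomic->compare_exchange_strong(old_value, new_value,
                                              std::memory_order_relaxed);
}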
card_table.cc
58 CardTable* CardTable::Create(const byte* heap_begin, size_t heap_capacity) { in Create()
71 byte* cardtable_begin = mem_map->Begin(); in Create()
77 byte* biased_begin = reinterpret_cast<byte*>(reinterpret_cast<uintptr_t>(cardtable_begin) - in Create()
89 CardTable::CardTable(MemMap* mem_map, byte* biased_begin, size_t offset) in CardTable()
95 byte* card_start = CardFromAddr(space->Begin()); in ClearSpaceCards()
96 byte* card_end = CardFromAddr(space->End()); // Make sure to round up. in ClearSpaceCards()
109 void CardTable::CheckAddrIsInCardTable(const byte* addr) const { in CheckAddrIsInCardTable()
110 byte* card_addr = biased_begin_ + ((uintptr_t)addr >> kCardShift); in CheckAddrIsInCardTable()
111 byte* begin = mem_map_->Begin() + offset_; in CheckAddrIsInCardTable()
112 byte* end = mem_map_->End(); in CheckAddrIsInCardTable()
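
Read together, the Create() and CheckAddrIsInCardTable() hits above show why the table is "biased": the card array is offset downward by heap_begin >> kCardShift, so mapping a heap address to its card is a single shift-and-add with no subtraction of heap_begin at lookup time. A rough sketch of that arithmetic (the kCardShift value and the struct layout are assumptions for illustration):

#include <cstddef>
#include <cstdint>

constexpr size_t kCardShift = 7;              // assumed: 128-byte cards
constexpr size_t kCardSize = 1 << kCardShift;

struct CardTableSketch {
  // biased_begin_ = cardtable_begin - (heap_begin >> kCardShift),
  // as computed in the Create() snippet above.
  uint8_t* biased_begin_;

  uint8_t* CardFromAddr(const void* addr) const {
    return biased_begin_ + (reinterpret_cast<uintptr_t>(addr) >> kCardShift);
  }

  void* AddrFromCard(const uint8_t* card) const {
    uintptr_t offset = static_cast<uintptr_t>(card - biased_begin_);
    return reinterpret_cast<void*>(offset << kCardShift);
  }
};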
remembered_set.cc
45 void operator()(byte* card, byte expected_value, byte new_value) const { in operator ()()
132 for (byte* const card_addr : dirty_cards_) { in UpdateAndMarkReferences()
148 for (byte* const card_addr : remove_card_set) { in UpdateAndMarkReferences()
157 for (const byte* card_addr : dirty_cards_) { in Dump()
167 for (const byte* card_addr : dirty_cards_) { in AssertAllDirtyCardsAreWithinSpace()
168 auto start = reinterpret_cast<byte*>(card_table->AddrFromCard(card_addr)); in AssertAllDirtyCardsAreWithinSpace()
mod_union_table.cc
48 inline void operator()(byte* card, byte expected_value, byte new_value) const { in operator ()()
60 explicit ModUnionClearCardVisitor(std::vector<byte*>* cleared_cards) in ModUnionClearCardVisitor()
64 void operator()(byte* card, byte expected_card, byte new_card) const { in operator ()()
70 std::vector<byte*>* const cleared_cards_;
229 const byte* card = ref_pair.first; in Verify()
245 for (byte* card_addr : cleared_cards_) { in Dump()
252 const byte* card_addr = ref_pair.first; in Dump()
318 for (const byte* card_addr : cleared_cards_) { in UpdateAndMarkReferences()
328 for (const byte* card_addr : cleared_cards_) { in Dump()
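
The operator()(byte* card, byte expected_value, byte new_value) hits in remembered_set.cc and mod_union_table.cc are callbacks handed to CardTable::ModifyCardsAtomic() (declared in the card_table.h hits above): every card in a range is CAS-updated, and the callback is told which cards actually changed so it can collect them. A sketch of that shape, with assumed functor signatures:

#include <atomic>
#include <cstdint>

template <typename Transform, typename OnModified>
void ModifyCardsAtomicSketch(uint8_t* card_begin, uint8_t* card_end,
                             const Transform& transform,      // old card value -> new value
                             const OnModified& on_modified) { // (card, expected, new_value)
  for (uint8_t* card = card_begin; card != card_end; ++card) {
    auto* atomic_card = reinterpret_cast<std::atomic<uint8_t>*>(card);
    uint8_t expected = atomic_card->load(std::memory_order_relaxed);
    while (true) {
      uint8_t desired = transform(expected);
      if (desired == expected) {
        break;  // nothing to change for this card
      }
      if (atomic_card->compare_exchange_weak(expected, desired,
                                             std::memory_order_relaxed)) {
        on_modified(card, expected, desired);
        break;
      }
      // CAS lost a race: 'expected' now holds the current value, retry.
    }
  }
}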
/art/runtime/
image.h
48 byte* GetImageBegin() const { in GetImageBegin()
49 return reinterpret_cast<byte*>(image_begin_); in GetImageBegin()
72 byte* GetOatFileBegin() const { in GetOatFileBegin()
73 return reinterpret_cast<byte*>(oat_file_begin_); in GetOatFileBegin()
76 byte* GetOatDataBegin() const { in GetOatDataBegin()
77 return reinterpret_cast<byte*>(oat_data_begin_); in GetOatDataBegin()
80 byte* GetOatDataEnd() const { in GetOatDataEnd()
81 return reinterpret_cast<byte*>(oat_data_end_); in GetOatDataEnd()
84 byte* GetOatFileEnd() const { in GetOatFileEnd()
85 return reinterpret_cast<byte*>(oat_file_end_); in GetOatFileEnd()
[all …]
memory_region.h
44 byte* start() const { return reinterpret_cast<byte*>(pointer_); } in start()
45 byte* end() const { return start() + size_; } in end()
63 uint8_t byte = *ComputeBitPointer(bit_offset, &bit_mask); in LoadBit() local
64 return byte & bit_mask; in LoadBit()
69 uint8_t* byte = ComputeBitPointer(bit_offset, &bit_mask); in StoreBit() local
71 *byte |= bit_mask; in StoreBit()
73 *byte &= ~bit_mask; in StoreBit()
101 byte* ComputeBitPointer(uintptr_t bit_offset, byte* bit_mask) const { in ComputeBitPointer()
105 return ComputeInternalPointer<byte>(byte_offset); in ComputeBitPointer()
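
The memory_region.h hits outline how a bit offset becomes a byte pointer plus a one-bit mask (ComputeBitPointer), which LoadBit()/StoreBit() then apply. A standalone sketch of the same decomposition (the base pointer parameter and the in-byte bit order are assumptions; MemoryRegion resolves the pointer relative to its own start()):

#include <cstdint>

inline uint8_t* ComputeBitPointer(uint8_t* base, uintptr_t bit_offset,
                                  uint8_t* bit_mask) {
  *bit_mask = static_cast<uint8_t>(1u << (bit_offset & 7));  // bit within the byte
  return base + (bit_offset >> 3);                           // byte holding the bit
}

inline bool LoadBit(uint8_t* base, uintptr_t bit_offset) {
  uint8_t bit_mask;
  uint8_t byte = *ComputeBitPointer(base, bit_offset, &bit_mask);
  return (byte & bit_mask) != 0;
}

inline void StoreBit(uint8_t* base, uintptr_t bit_offset, bool value) {
  uint8_t bit_mask;
  uint8_t* byte = ComputeBitPointer(base, bit_offset, &bit_mask);
  if (value) {
    *byte |= bit_mask;
  } else {
    *byte &= static_cast<uint8_t>(~bit_mask);
  }
}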
elf_file.h
57 byte* Begin() const { in Begin()
62 byte* End() const { in End()
83 const byte* FindDynamicSymbolAddress(const std::string& symbol_name) const;
121 byte* GetProgramHeadersStart() const;
122 byte* GetSectionHeadersStart() const;
138 bool ValidPointer(const byte* start) const;
146 bool CheckSectionsLinked(const byte* source, const byte* target) const;
149 bool CheckAndSet(Elf32_Off offset, const char* label, byte** target, std::string* error_msg);
183 byte* base_address_;
186 byte* program_headers_start_;
[all …]
mem_map.h
63 static MemMap* MapAnonymous(const char* ashmem_name, byte* addr, size_t byte_count, int prot,
83 static MemMap* MapFileAtAddress(byte* addr, size_t byte_count, int prot, int flags, int fd,
102 byte* Begin() const { in Begin()
110 byte* End() const { in End()
123 return reinterpret_cast<byte*>(BaseBegin()) + BaseSize(); in BaseEnd()
131 MemMap* RemapAtEnd(byte* new_end, const char* tail_name, int tail_prot,
145 MemMap(const std::string& name, byte* begin, size_t size, void* base_begin, size_t base_size,
156 byte* const begin_; // Start of data.
oat_file.h
51 byte* requested_base,
153 OatMethod(const byte* base, const uint32_t code_offset);
166 const byte* begin_;
262 const byte* dex_file_pointer,
269 const byte* const dex_file_pointer_;
289 const byte* Begin() const;
290 const byte* End() const;
297 byte* requested_base,
302 byte* requested_base,
309 bool Dlopen(const std::string& elf_filename, byte* requested_base, std::string* error_msg);
[all …]
dex_file_verifier_test.cc
33 static const byte kBase64Map[256] = {
58 static inline byte* DecodeBase64(const char* src, size_t* dst_size) { in DecodeBase64()
59 std::vector<byte> tmp; in DecodeBase64()
63 byte c = kBase64Map[src[i] & 0xFF]; in DecodeBase64()
94 std::unique_ptr<byte[]> dst(new byte[tmp.size()]); in DecodeBase64()
109 std::unique_ptr<byte[]> dex_bytes(DecodeBase64(base64, &length)); in OpenDexFileBase64()
159 static void FixUpChecksum(byte* dex_file) { in FixUpChecksum()
164 const byte* non_sum_ptr = dex_file + non_sum; in FixUpChecksum()
169 static const DexFile* FixChecksumAndOpen(byte* bytes, size_t length, const char* location, in FixChecksumAndOpen()
205 std::unique_ptr<byte[]> dex_bytes(DecodeBase64(kGoodTestDex, &length)); in ModifyAndLoad()
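
FixUpChecksum() appears in these tests because they deliberately corrupt dex bytes and must then restore a valid header checksum: the dex checksum is an Adler-32 over everything past the 8-byte magic and the 4-byte checksum field itself. A sketch of that repair using zlib (the function name and the raw memcpy write-back are illustrative; dex headers are little-endian):

#include <zlib.h>
#include <cstddef>
#include <cstdint>
#include <cstring>

inline void FixUpChecksumSketch(uint8_t* dex_file, size_t size) {
  const size_t non_sum = 8 + 4;  // skip the magic (8 bytes) and the checksum field (4 bytes)
  const uint8_t* non_sum_ptr = dex_file + non_sum;
  uLong adler = adler32(0L, Z_NULL, 0);
  adler = adler32(adler, non_sum_ptr, static_cast<uInt>(size - non_sum));
  uint32_t checksum = static_cast<uint32_t>(adler);
  std::memcpy(dex_file + 8, &checksum, sizeof(checksum));  // checksum field sits right after the magic
}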
dex_file_verifier.h
29 static bool Verify(const DexFile* dex_file, const byte* begin, size_t size,
37 DexFileVerifier(const DexFile* dex_file, const byte* begin, size_t size, const char* location) in DexFileVerifier()
48 bool CheckList(size_t element_size, const char* label, const byte* *ptr);
84 uint16_t FindFirstClassDataDefiner(const byte* ptr, bool* success);
85 uint16_t FindFirstAnnotationsDirectoryDefiner(const byte* ptr, bool* success);
115 const byte* const begin_;
121 const byte* ptr_;
mem_map.cc
194 static bool CheckMapRequest(byte* expected_ptr, void* actual_ptr, size_t byte_count, in CheckMapRequest()
239 MemMap* MemMap::MapAnonymous(const char* name, byte* expected_ptr, size_t byte_count, int prot, in MapAnonymous()
382 return new MemMap(name, reinterpret_cast<byte*>(actual), byte_count, actual, in MapAnonymous()
386 MemMap* MemMap::MapFileAtAddress(byte* expected_ptr, size_t byte_count, int prot, int flags, int fd, in MapFileAtAddress()
421 byte* page_aligned_expected = (expected_ptr == nullptr) ? nullptr : (expected_ptr - page_offset); in MapFileAtAddress()
423 byte* actual = reinterpret_cast<byte*>(mmap(page_aligned_expected, in MapFileAtAddress()
476 MemMap::MemMap(const std::string& name, byte* begin, size_t size, void* base_begin, in MemMap()
496 MemMap* MemMap::RemapAtEnd(byte* new_end, const char* tail_name, int tail_prot, in RemapAtEnd()
500 DCHECK_LE(begin_ + size_, reinterpret_cast<byte*>(base_begin_) + base_size_); in RemapAtEnd()
503 DCHECK(IsAligned<kPageSize>(reinterpret_cast<byte*>(base_begin_) + base_size_)); in RemapAtEnd()
[all …]
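
The mem_map.h and mem_map.cc hits give the shape of MapAnonymous()/MapFileAtAddress(): mmap at a page-aligned requested address, then verify via CheckMapRequest() that the kernel honoured the hint. A simplified sketch of the anonymous case (the name and the bare-bones error handling are placeholders, not ART's API):

#include <sys/mman.h>
#include <cstddef>
#include <cstdint>

inline uint8_t* MapAnonymousSketch(uint8_t* expected_ptr, size_t byte_count, int prot) {
  void* actual = mmap(expected_ptr, byte_count, prot,
                      MAP_PRIVATE | MAP_ANONYMOUS, /*fd=*/-1, /*offset=*/0);
  if (actual == MAP_FAILED) {
    return nullptr;
  }
  if (expected_ptr != nullptr && actual != expected_ptr) {
    // The spirit of CheckMapRequest(): a specific address was requested but
    // not granted, so release the mapping and report failure.
    munmap(actual, byte_count);
    return nullptr;
  }
  return reinterpret_cast<uint8_t*>(actual);
}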
mem_map_test.cc
27 static byte* BaseBegin(MemMap* mem_map) { in BaseBegin()
28 return reinterpret_cast<byte*>(mem_map->base_begin_); in BaseBegin()
46 byte* base0 = m0->Begin(); in RemapAtEndTest()
63 byte* base1 = m1->Begin(); in RemapAtEndTest()
168 reinterpret_cast<byte*>(ART_BASE_ADDRESS), in TEST_F()
188 reinterpret_cast<byte*>(map1->BaseBegin()), in TEST_F()
212 reinterpret_cast<byte*>(start_addr), in TEST_F()
228 reinterpret_cast<byte*>(ptr), in TEST_F()
242 reinterpret_cast<byte*>(UINT64_C(0x100000000)), in TEST_F()
255 reinterpret_cast<byte*>(0xF0000000), in TEST_F()
[all …]
elf_file.cc
45 const byte *symfile_addr_;
70 static JITCodeEntry* CreateCodeEntry(const byte *symfile_addr, in CreateCodeEntry()
237 reinterpret_cast<byte**>(&dynamic_section_start_), error_msg)) { in Setup()
252 reinterpret_cast<byte**>(&symtab_section_start_), error_msg)) { in Setup()
259 reinterpret_cast<byte**>(&dynsym_section_start_), error_msg)) { in Setup()
271 reinterpret_cast<byte**>(&dynstr_section_start_), error_msg)) { in Setup()
280 reinterpret_cast<byte**>(&strtab_section_start_), error_msg)) { in Setup()
288 if (reinterpret_cast<byte*>(dynamic_section_start_) != in Setup()
300 reinterpret_cast<byte**>(&hash_section_start_), error_msg)) { in Setup()
328 byte** target, std::string* error_msg) { in CheckAndSet()
[all …]
/art/runtime/gc/allocator/
rosalloc.h
46 byte magic_num_; // The magic number used for debugging only.
52 const byte* fpr_base = reinterpret_cast<const byte*>(this); in ByteSize()
62 byte* fpr_base = reinterpret_cast<byte*>(this); in SetByteSize()
70 byte* fpr_base = reinterpret_cast<byte*>(this); in End()
71 byte* end = fpr_base + ByteSize(rosalloc); in End()
80 …return reinterpret_cast<byte*>(this) + ByteSize(rosalloc) == rosalloc->base_ + rosalloc->footprint… in IsAtEndOfSpace()
100 byte* start = reinterpret_cast<byte*>(this); in ReleasePages()
153 byte magic_num_; // The magic number used for debugging.
154 byte size_bracket_idx_; // The index of the size bracket of this run.
155 byte is_thread_local_; // True if this run is used as a thread-local run.
[all …]
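
The rosalloc.h hits show that a FreePageRun describes its extent purely through arithmetic on its own address: End() is this + ByteSize(), and IsAtEndOfSpace() compares that end against base_ + footprint_. A stand-alone sketch (the inline byte_size_ field stands in for RosAlloc's real size bookkeeping, which the snippets only hint at):

#include <cstddef>
#include <cstdint>

struct RosAllocSketch {
  uint8_t* base_;     // start of the managed region
  size_t footprint_;  // bytes currently in use by the allocator
};

struct FreePageRunSketch {
  size_t byte_size_;  // assumption: stored inline for this sketch

  uint8_t* End() {
    uint8_t* fpr_base = reinterpret_cast<uint8_t*>(this);
    return fpr_base + byte_size_;
  }

  bool IsAtEndOfSpace(const RosAllocSketch* rosalloc) {
    return End() == rosalloc->base_ + rosalloc->footprint_;
  }
};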
/art/runtime/gc/space/
space.h
249 byte* Begin() const { in Begin()
254 byte* End() const { in End()
259 byte* Limit() const { in Limit()
265 void SetEnd(byte* end) { in SetEnd()
269 void SetLimit(byte* limit) { in SetLimit()
289 const byte* byte_ptr = reinterpret_cast<const byte*>(obj); in HasAddress()
305 byte* begin, byte* end, byte* limit) : in ContinuousSpace()
310 byte* begin_;
313 Atomic<byte*> end_;
316 byte* limit_;
[all …]
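
The space.h hits sketch ContinuousSpace's contract: a space is a [Begin(), Limit()) byte range, End() is the current allocation frontier kept in an Atomic<byte*>, and HasAddress() is a plain bounds check on the object's byte address. A trimmed-down sketch (member layout and the choice of Limit() as the upper bound are assumptions):

#include <atomic>
#include <cstdint>

class ContinuousSpaceSketch {
 public:
  uint8_t* Begin() const { return begin_; }
  uint8_t* End() const { return end_.load(std::memory_order_relaxed); }
  uint8_t* Limit() const { return limit_; }

  bool HasAddress(const void* obj) const {
    const uint8_t* byte_ptr = reinterpret_cast<const uint8_t*>(obj);
    return byte_ptr >= Begin() && byte_ptr < Limit();
  }

 private:
  uint8_t* begin_;             // first byte of the space
  std::atomic<uint8_t*> end_;  // current allocation frontier
  uint8_t* limit_;             // hard bound the space may grow to
};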
valgrind_malloc_space-inl.h
42 reinterpret_cast<byte*>(obj_with_rdz) + kValgrindRedZoneBytes); in AllocWithGrowth()
45 VALGRIND_MAKE_MEM_NOACCESS(reinterpret_cast<byte*>(result) + num_bytes, kValgrindRedZoneBytes); in AllocWithGrowth()
59 reinterpret_cast<byte*>(obj_with_rdz) + kValgrindRedZoneBytes); in Alloc()
62 VALGRIND_MAKE_MEM_NOACCESS(reinterpret_cast<byte*>(result) + num_bytes, kValgrindRedZoneBytes); in Alloc()
69 reinterpret_cast<byte*>(obj) - kValgrindRedZoneBytes), usable_size); in AllocationSize()
76 void* obj_with_rdz = reinterpret_cast<byte*>(obj_after_rdz) - kValgrindRedZoneBytes; in Free()
96 A allocator, byte* begin, in ValgrindMallocSpace()
97 byte* end, byte* limit, size_t growth_limit, in ValgrindMallocSpace()
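
The valgrind_malloc_space-inl.h hits show the red-zone scheme: every allocation is padded by kValgrindRedZoneBytes on both sides, the caller receives a pointer just past the front pad, and both pads are marked no-access so Memcheck flags out-of-bounds reads and writes. A sketch of the allocation side (the pad size and the underlying_alloc callback are placeholders; requires the Valgrind headers):

#include <valgrind/memcheck.h>
#include <cstddef>
#include <cstdint>

constexpr size_t kValgrindRedZoneBytes = 8;  // assumed pad size

inline void* AllocWithRedZones(void* (*underlying_alloc)(size_t), size_t num_bytes) {
  auto* obj_with_rdz = static_cast<uint8_t*>(
      underlying_alloc(num_bytes + 2 * kValgrindRedZoneBytes));
  if (obj_with_rdz == nullptr) {
    return nullptr;
  }
  uint8_t* result = obj_with_rdz + kValgrindRedZoneBytes;                 // usable region
  VALGRIND_MAKE_MEM_NOACCESS(obj_with_rdz, kValgrindRedZoneBytes);        // front pad
  VALGRIND_MAKE_MEM_NOACCESS(result + num_bytes, kValgrindRedZoneBytes);  // back pad
  return result;
}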
large_object_space.h
74 byte* Begin() const { in Begin()
78 byte* End() const { in End()
87 const byte* byte_obj = reinterpret_cast<const byte*>(obj); in Contains()
94 explicit LargeObjectSpace(const std::string& name, byte* begin, byte* end);
103 byte* begin_;
104 byte* end_;
146 static FreeListSpace* Create(const std::string& name, byte* requested_begin, size_t capacity);
156 FreeListSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end);
bump_pointer_space.cc
28 byte* requested_begin) { in Create()
45 BumpPointerSpace::BumpPointerSpace(const std::string& name, byte* begin, byte* limit) in BumpPointerSpace()
137 byte* BumpPointerSpace::AllocBlock(size_t bytes) { in AllocBlock()
142 byte* storage = reinterpret_cast<byte*>( in AllocBlock()
154 byte* pos = Begin(); in Walk()
155 byte* end = End(); in Walk()
156 byte* main_end = pos; in Walk()
182 pos = reinterpret_cast<byte*>(GetNextObject(obj)); in Walk()
192 CHECK_LE(reinterpret_cast<const byte*>(end), End()); in Walk()
253 byte* start = AllocBlock(bytes); in AllocNewTlab()
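
The bump_pointer_space.cc hits, together with the Atomic<byte*> end_ field visible in the space.h hits, show the allocator's core trick: allocation only advances an atomic end pointer, and Walk() later scans linearly from Begin() to End() via GetNextObject(). A sketch of the allocation fast path (class shape and method name are illustrative):

#include <atomic>
#include <cstddef>
#include <cstdint>

class BumpPointerSketch {
 public:
  BumpPointerSketch(uint8_t* begin, uint8_t* limit)
      : begin_(begin), end_(begin), limit_(limit) {}

  // Lock-free bump allocation: claim [old_end, old_end + num_bytes) with a CAS.
  uint8_t* Alloc(size_t num_bytes) {
    uint8_t* old_end = end_.load(std::memory_order_relaxed);
    uint8_t* new_end;
    do {
      new_end = old_end + num_bytes;
      if (new_end > limit_) {
        return nullptr;  // space exhausted
      }
    } while (!end_.compare_exchange_weak(old_end, new_end,
                                         std::memory_order_relaxed));
    return old_end;
  }

 private:
  uint8_t* begin_;
  std::atomic<uint8_t*> end_;
  uint8_t* limit_;
};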
dlmalloc_space.h
47 size_t capacity, byte* requested_begin, bool can_move_objects);
111 byte* begin, byte* end, byte* limit, size_t growth_limit,
131 DlMallocSpace(const std::string& name, MemMap* mem_map, void* mspace, byte* begin, byte* end,
132 byte* limit, size_t growth_limit, bool can_move_objects, size_t starting_size,
malloc_space.h
118 byte* begin, byte* end, byte* limit, size_t growth_limit,
141 MallocSpace(const std::string& name, MemMap* mem_map, byte* begin, byte* end,
142 byte* limit, size_t growth_limit, bool create_bitmaps, bool can_move_objects,
146 size_t* growth_limit, size_t* capacity, byte* requested_begin);
bump_pointer_space.h
45 static BumpPointerSpace* Create(const std::string& name, size_t capacity, byte* requested_begin);
124 const byte* byte_obj = reinterpret_cast<const byte*>(obj); in Contains()
129 BumpPointerSpace(const std::string& name, byte* begin, byte* limit);
164 byte* AllocBlock(size_t bytes) EXCLUSIVE_LOCKS_REQUIRED(block_lock_);
172 byte* growth_end_;
/art/test/041-narrowing/
expected.txt
9 (byte) Double.NaN = 00 expected: 00
14 (byte) Float.NaN = 00 expected: 00
19 (byte) Double.POSITIVE_INFINITY = ff expected: ff
24 (byte) Double.NEGATIVE_INFINITY = 00 expected: 00
29 (byte) Float.POSITIVE_INFINITY = ff expected: ff
34 (byte) Float.NEGATIVE_INFINITY = 00 expected: 00
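
The expected.txt values encode the two-step narrowing this test exercises: a float or double is first converted to int with saturation (NaN becomes 0, the infinities clamp to the int extremes), and only the low 8 bits survive the int-to-byte step, which is why POSITIVE_INFINITY prints ff and NEGATIVE_INFINITY prints 00. A C++ sketch of those rules (it mirrors the Java semantics under test; it is not the test's own code):

#include <cmath>
#include <cstdint>
#include <limits>

int32_t NarrowDoubleToInt(double v) {
  if (std::isnan(v)) return 0;                                         // NaN -> 0
  if (v >= static_cast<double>(std::numeric_limits<int32_t>::max())) {
    return std::numeric_limits<int32_t>::max();                        // +inf clamps high
  }
  if (v <= static_cast<double>(std::numeric_limits<int32_t>::min())) {
    return std::numeric_limits<int32_t>::min();                        // -inf clamps low
  }
  return static_cast<int32_t>(v);
}

int8_t NarrowIntToByte(int32_t v) {
  // Keep only the low 8 bits, reinterpreted as a signed (two's-complement) byte.
  return static_cast<int8_t>(static_cast<uint8_t>(v));
}

// NarrowIntToByte(NarrowDoubleToInt(INFINITY))  -> bit pattern 0xff (prints as ff)
// NarrowIntToByte(NarrowDoubleToInt(-INFINITY)) -> 0x00              (prints as 00)
// NarrowIntToByte(NarrowDoubleToInt(NAN))       -> 0x00              (prints as 00)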
