Searched refs:ResTable_sparseTypeEntry (Results 1 – 5 of 5) sorted by relevance
34   mLength = ResTable_sparseTypeEntry{entryIndices[entryCount - 1]}.idx + 1;   in TypeVariant()
48   return dtohs(ResTable_sparseTypeEntry{entry}.idx) < index;   in keyCompare()
71   || dtohs(ResTable_sparseTypeEntry{*iter}.idx) != mIndex) {   in operator *()
75   entryOffset = static_cast<uint32_t>(dtohs(ResTable_sparseTypeEntry{*iter}.offset)) * 4u;   in operator *()
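
The hits above read the sparse index table: the constructor derives the logical entry count from the last index, keyCompare drives a binary search, and the iterator turns the stored 16-bit offset back into a byte offset. A minimal self-contained sketch of those two calculations (stand-in names, not the real TypeVariant code; dtohs() is stubbed as an identity for a little-endian host):

    // Sketch only: SparseTypeEntry stands in for ResTable_sparseTypeEntry.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    union SparseTypeEntry {
      uint32_t entry;              // raw 32-bit value as stored in the table
      struct {
        uint16_t idx;              // entry index within the type
        uint16_t offset;           // offset from entriesStart, divided by 4
      };
    };

    static uint16_t dtohs(uint16_t v) { return v; }  // device-to-host, LE host assumed

    int main() {
      std::vector<SparseTypeEntry> entryIndices(2);
      entryIndices[0].idx = 3;  entryIndices[0].offset = 0;
      entryIndices[1].idx = 25; entryIndices[1].offset = 9;

      // "mLength = ...{entryIndices[entryCount - 1]}.idx + 1": highest index plus one.
      const size_t length = dtohs(entryIndices.back().idx) + 1u;

      // "entryOffset = ... dtohs(...offset) * 4u": undo the divide-by-4 packing.
      const uint32_t entryOffset =
          static_cast<uint32_t>(dtohs(entryIndices.back().offset)) * 4u;

      std::printf("length=%zu lastEntryOffset=%u\n", length, entryOffset);
      return 0;
    }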
258  const ResTable_sparseTypeEntry* sparse_indices =   in GetEntryOffset()
259      reinterpret_cast<const ResTable_sparseTypeEntry*>(   in GetEntryOffset()
261  const ResTable_sparseTypeEntry* sparse_indices_end = sparse_indices + entry_count;   in GetEntryOffset()
262  const ResTable_sparseTypeEntry* result =   in GetEntryOffset()
264      [](const ResTable_sparseTypeEntry& entry, uint16_t entry_idx) {   in GetEntryOffset()
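
These hits are the reader-side lookup: the sparse index table is sorted by entry index, so std::lower_bound with an (entry, index) comparator finds the slot, and a miss is detected by re-checking idx afterwards. A sketch of that lookup under assumed names (GetSparseEntryOffset and SparseTypeEntry are stand-ins, not the real LoadedArsc API):

    #include <algorithm>
    #include <cstdint>
    #include <optional>
    #include <vector>

    union SparseTypeEntry {        // stand-in for ResTable_sparseTypeEntry
      uint32_t entry;
      struct {
        uint16_t idx;              // entry index, sorted ascending
        uint16_t offset;           // byte offset from entriesStart, divided by 4
      };
    };

    // Returns the byte offset of entry_index, or std::nullopt if the entry is
    // absent from the sparse table (no value in this configuration).
    std::optional<uint32_t> GetSparseEntryOffset(
        const std::vector<SparseTypeEntry>& sparse_indices, uint16_t entry_index) {
      auto it = std::lower_bound(
          sparse_indices.begin(), sparse_indices.end(), entry_index,
          [](const SparseTypeEntry& entry, uint16_t idx) { return entry.idx < idx; });
      if (it == sparse_indices.end() || it->idx != entry_index) {
        return std::nullopt;       // lower_bound landed past or beside the target
      }
      return static_cast<uint32_t>(it->offset) * 4u;  // undo the /4 packing
    }

    int main() {
      std::vector<SparseTypeEntry> table(3);
      table[0].idx = 2;  table[0].offset = 0;
      table[1].idx = 9;  table[1].offset = 5;    // entry 9 lives 20 bytes in
      table[2].idx = 40; table[2].offset = 11;
      return GetSparseEntryOffset(table, 9).value_or(0) == 20 ? 0 : 1;
    }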
6365  static bool keyCompare(const ResTable_sparseTypeEntry& entry, uint16_t entryIdx) {   in keyCompare()
6475  const ResTable_sparseTypeEntry* sparseIndices =   in getEntry()
6476      reinterpret_cast<const ResTable_sparseTypeEntry*>(eindex);   in getEntry()
6477  const ResTable_sparseTypeEntry* result = std::lower_bound(   in getEntry()
7558  const ResTable_sparseTypeEntry* entry =   in print()
7559      reinterpret_cast<const ResTable_sparseTypeEntry*>(   in print()
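
The same sorted-by-index invariant backs the older ResTable path: keyCompare is the comparator handed to std::lower_bound in getEntry(), and print() walks the table to dump it. A sketch of such a dump loop (assumed shape and names, not the actual ResourceTypes.cpp code):

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    union SparseTypeEntry {        // stand-in for ResTable_sparseTypeEntry
      uint32_t entry;
      struct {
        uint16_t idx;
        uint16_t offset;           // stored divided by 4
      };
    };

    // Print each (index, byte offset) pair in the sparse index table.
    void DumpSparseEntries(const std::vector<SparseTypeEntry>& entries) {
      for (const SparseTypeEntry& e : entries) {
        std::printf("  entry 0x%04x -> offset 0x%08x\n",
                    static_cast<unsigned>(e.idx),
                    static_cast<unsigned>(e.offset) * 4u);
      }
    }

    int main() {
      std::vector<SparseTypeEntry> table(2);
      table[0].idx = 0;  table[0].offset = 0;
      table[1].idx = 10; table[1].offset = 6;
      DumpSparseEntries(table);
      return 0;
    }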
377  ResTable_sparseTypeEntry* indices =   in FlattenConfig()
378      type_writer.NextBlock<ResTable_sparseTypeEntry>(entries->size());   in FlattenConfig()
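
On the writer side, the flattener reserves one ResTable_sparseTypeEntry per present entry and fills it in. A writer-side sketch under assumed names (FlattenSparseIndices is hypothetical, not the aapt2 API); note the encoding only works when every offset is 4-byte aligned and still fits in 16 bits after dividing by 4:

    #include <cstdint>
    #include <map>
    #include <stdexcept>
    #include <vector>

    union SparseTypeEntry {        // stand-in for ResTable_sparseTypeEntry
      uint32_t entry;
      struct {
        uint16_t idx;
        uint16_t offset;           // byte offset / 4
      };
    };

    // Build a sparse index block from (entry index -> byte offset) pairs.
    std::vector<SparseTypeEntry> FlattenSparseIndices(
        const std::map<uint16_t, uint32_t>& offsets_by_index) {
      std::vector<SparseTypeEntry> indices;
      indices.reserve(offsets_by_index.size());
      for (const auto& [idx, byte_offset] : offsets_by_index) {
        if (byte_offset % 4u != 0u || byte_offset / 4u > 0xffffu) {
          throw std::runtime_error("offset not representable as a sparse entry");
        }
        SparseTypeEntry e;
        e.idx = idx;                                         // std::map keeps these sorted
        e.offset = static_cast<uint16_t>(byte_offset / 4u);  // packed form
        indices.push_back(e);
      }
      return indices;
    }

    int main() {
      std::map<uint16_t, uint32_t> offsets{{3, 0u}, {17, 48u}, {120, 96u}};
      return FlattenSparseIndices(offsets).size() == 3 ? 0 : 1;
    }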
1437  union ResTable_sparseTypeEntry {   union
1449  static_assert(sizeof(ResTable_sparseTypeEntry) == sizeof(uint32_t),
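
The declaration and the static_assert pin the type to a single 32-bit word. A sketch of the layout those two hits imply (not a verbatim copy of ResourceTypes.h; the anonymous-struct member relies on the common GCC/Clang extension the real header also uses):

    #include <cstdint>

    union ResTable_sparseTypeEntry_sketch {
      // Raw 32-bit value as it appears in the resource table.
      uint32_t entry;
      struct {
        // Index of the entry within the type.
        uint16_t idx;
        // Offset from entriesStart, divided by 4.
        uint16_t offset;
      };
    };

    static_assert(sizeof(ResTable_sparseTypeEntry_sketch) == sizeof(uint32_t),
                  "sparse type entries must stay exactly 4 bytes");

    int main() { return 0; }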