/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_BIT_TABLE_H_
#define ART_LIBARTBASE_BASE_BIT_TABLE_H_

#include <array>
#include <initializer_list>
#include <numeric>
#include <string.h>
#include <type_traits>
#include <unordered_map>

#include "base/bit_memory_region.h"
#include "base/casts.h"
#include "base/iteration_range.h"
#include "base/memory_region.h"
#include "base/scoped_arena_containers.h"
#include "base/stl_util.h"

namespace art {

// General-purpose table of uint32_t values, tightly packed at the bit level.
// It has its own header with the number of rows and the bit-widths of all columns.
// The values are accessible by (row, column).  The value -1 is stored efficiently.
template<uint32_t kNumColumns>
class BitTableBase {
 public:
  static constexpr uint32_t kNoValue = std::numeric_limits<uint32_t>::max();  // == -1.
  static constexpr uint32_t kValueBias = kNoValue;  // Bias so that -1 is encoded as 0.
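  // Note: values are stored with a bias of one (see Get() below and BitTableBuilderBase::Encode()):
  // a logical value v is written as v + 1 and decoded as stored + kValueBias, so kNoValue (-1)
  // packs as the all-zero bit pattern and, e.g., a logical 5 occupies the bits of 6.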

  BitTableBase() {}
  explicit BitTableBase(BitMemoryReader& reader) {
    Decode(reader);
  }

  ALWAYS_INLINE void Decode(BitMemoryReader& reader) {
    // Decode row count and column sizes from the table header.
    std::array<uint32_t, 1+kNumColumns> header = reader.ReadInterleavedVarints<1+kNumColumns>();
    num_rows_ = header[0];
    column_offset_[0] = 0;
    for (uint32_t i = 0; i < kNumColumns; i++) {
      size_t column_end = column_offset_[i] + header[i + 1];
      column_offset_[i + 1] = dchecked_integral_cast<uint16_t>(column_end);
    }

    // Record the region which contains the table data and skip past it.
    table_data_ = reader.ReadRegion(num_rows_ * NumRowBits());
  }

  ALWAYS_INLINE uint32_t Get(uint32_t row, uint32_t column = 0) const {
    DCHECK(table_data_.IsValid()) << "Table has not been loaded";
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.LoadBits(offset, NumColumnBits(column)) + kValueBias;
  }

  ALWAYS_INLINE BitMemoryRegion GetBitMemoryRegion(uint32_t row, uint32_t column = 0) const {
    DCHECK(table_data_.IsValid()) << "Table has not been loaded";
    DCHECK_LT(row, num_rows_);
    DCHECK_LT(column, kNumColumns);
    size_t offset = row * NumRowBits() + column_offset_[column];
    return table_data_.Subregion(offset, NumColumnBits(column));
  }

  uint32_t NumRows() const { return num_rows_; }

  uint32_t NumRowBits() const { return column_offset_[kNumColumns]; }

  constexpr uint32_t NumColumns() const { return kNumColumns; }

  uint32_t NumColumnBits(uint32_t column) const {
    return column_offset_[column + 1] - column_offset_[column];
  }

  size_t DataBitSize() const { return table_data_.size_in_bits(); }

  bool Equals(const BitTableBase& other) const {
    return num_rows_ == other.num_rows_ &&
        std::equal(column_offset_, column_offset_ + kNumColumns, other.column_offset_) &&
        BitMemoryRegion::Equals(table_data_, other.table_data_);
  }

 protected:
  BitMemoryRegion table_data_;
  uint32_t num_rows_ = 0;
  uint16_t column_offset_[kNumColumns + 1] = {};
};
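
// Illustrative sketch (not part of the API): decoding a two-column table from an encoded
// stream and reading a value. The reader construction is simplified and names such as
// `encoded_data` are hypothetical.
//
//   BitMemoryReader reader(encoded_data);
//   BitTableBase<2> table(reader);            // Reads the header and the packed rows.
//   uint32_t value = table.Get(/*row=*/ 0, /*column=*/ 1);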

// Helper class which can be used to create BitTable accessors with named getters.
template<uint32_t NumColumns>
class BitTableAccessor {
 public:
  static constexpr uint32_t kNumColumns = NumColumns;
  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;

  BitTableAccessor() = default;
  BitTableAccessor(const BitTableBase<kNumColumns>* table, uint32_t row)
      : table_(table), row_(row) {
    DCHECK(table_ != nullptr);
  }

  ALWAYS_INLINE uint32_t Row() const { return row_; }

  ALWAYS_INLINE bool IsValid() const { return row_ < table_->NumRows(); }

  ALWAYS_INLINE bool Equals(const BitTableAccessor& other) {
    return this->table_ == other.table_ && this->row_ == other.row_;
  }

// Helper macro to create constructors and per-table utilities in derived class.
#define BIT_TABLE_HEADER(NAME)                                                       \
  using BitTableAccessor<kNumColumns>::BitTableAccessor; /* inherit constructors */  \
  template<int COLUMN, int UNUSED /*needed to compile*/> struct ColumnName;          \
  static constexpr const char* kTableName = #NAME;                                   \

// Helper macro to create named column accessors in derived class.
#define BIT_TABLE_COLUMN(COLUMN, NAME)                                               \
  static constexpr uint32_t k##NAME = COLUMN;                                        \
  ALWAYS_INLINE uint32_t Get##NAME() const { return table_->Get(row_, COLUMN); }     \
  ALWAYS_INLINE bool Has##NAME() const { return Get##NAME() != kNoValue; }           \
  template<int UNUSED> struct ColumnName<COLUMN, UNUSED> {                           \
    static constexpr const char* Value = #NAME;                                      \
  };                                                                                 \

 protected:
  const BitTableBase<kNumColumns>* table_ = nullptr;
  uint32_t row_ = -1;
};
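
// Illustrative sketch only: a derived accessor typically combines the two macros above.
// `MyRowAccessor` and its column names are hypothetical, not part of this header.
//
//   class MyRowAccessor : public BitTableAccessor<2> {
//    public:
//     BIT_TABLE_HEADER(MyRow)
//     BIT_TABLE_COLUMN(0, PackedValue)
//     BIT_TABLE_COLUMN(1, Flags)
//   };
//
// This yields GetPackedValue()/HasPackedValue(), GetFlags()/HasFlags(), kTableName and the
// per-column name metadata used by GetBitTableColumnNamesImpl() below.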

// Template meta-programming helper.
template<typename Accessor, size_t... Columns>
static const char* const* GetBitTableColumnNamesImpl(std::index_sequence<Columns...>) {
  static const char* names[] = { Accessor::template ColumnName<Columns, 0>::Value... };
  return names;
}

// Wrapper which makes it easier to use named accessors for the individual rows.
template<typename Accessor>
class BitTable : public BitTableBase<Accessor::kNumColumns> {
 public:
  class const_iterator : public std::iterator<std::random_access_iterator_tag,
                                              /* value_type */ Accessor,
                                              /* difference_type */ int32_t,
                                              /* pointer */ void,
                                              /* reference */ void> {
   public:
    using difference_type = int32_t;
    const_iterator() {}
    const_iterator(const BitTable* table, uint32_t row) : table_(table), row_(row) {}
    const_iterator operator+(difference_type n) { return const_iterator(table_, row_ + n); }
    const_iterator operator-(difference_type n) { return const_iterator(table_, row_ - n); }
    difference_type operator-(const const_iterator& other) { return row_ - other.row_; }
    void operator+=(difference_type rows) { row_ += rows; }
    void operator-=(difference_type rows) { row_ -= rows; }
    const_iterator operator++() { return const_iterator(table_, ++row_); }
    const_iterator operator--() { return const_iterator(table_, --row_); }
    const_iterator operator++(int) { return const_iterator(table_, row_++); }
    const_iterator operator--(int) { return const_iterator(table_, row_--); }
    bool operator==(const_iterator i) const { DCHECK(table_ == i.table_); return row_ == i.row_; }
    bool operator!=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ != i.row_; }
    bool operator<=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ <= i.row_; }
    bool operator>=(const_iterator i) const { DCHECK(table_ == i.table_); return row_ >= i.row_; }
    bool operator<(const_iterator i) const { DCHECK(table_ == i.table_); return row_ < i.row_; }
    bool operator>(const_iterator i) const { DCHECK(table_ == i.table_); return row_ > i.row_; }
    Accessor operator*() {
      DCHECK_LT(row_, table_->NumRows());
      return Accessor(table_, row_);
    }
    Accessor operator->() {
      DCHECK_LT(row_, table_->NumRows());
      return Accessor(table_, row_);
    }
    Accessor operator[](size_t index) {
      DCHECK_LT(row_ + index, table_->NumRows());
      return Accessor(table_, row_ + index);
    }
   private:
    const BitTable* table_ = nullptr;
    uint32_t row_ = 0;
  };

  using BitTableBase<Accessor::kNumColumns>::BitTableBase;  // Constructors.

  ALWAYS_INLINE const_iterator begin() const { return const_iterator(this, 0); }
  ALWAYS_INLINE const_iterator end() const { return const_iterator(this, this->NumRows()); }

  ALWAYS_INLINE Accessor GetRow(uint32_t row) const {
    return Accessor(this, row);
  }

  ALWAYS_INLINE Accessor GetInvalidRow() const {
    return Accessor(this, static_cast<uint32_t>(-1));
  }

  const char* GetName() const {
    return Accessor::kTableName;
  }

  const char* const* GetColumnNames() const {
    return GetBitTableColumnNamesImpl<Accessor>(std::make_index_sequence<Accessor::kNumColumns>());
  }
};
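
// Illustrative sketch only: with an accessor such as the hypothetical MyRowAccessor above,
// rows can be read by index or iterated directly, since BitTable exposes begin()/end().
//
//   BitTable<MyRowAccessor> table(reader);
//   uint32_t flags = table.GetRow(0).GetFlags();
//   for (MyRowAccessor row : table) {
//     if (row.HasPackedValue()) { /* ... */ }
//   }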

template<typename Accessor>
typename BitTable<Accessor>::const_iterator operator+(
    typename BitTable<Accessor>::const_iterator::difference_type n,
    typename BitTable<Accessor>::const_iterator a) {
  return a + n;
}

template<typename Accessor>
class BitTableRange : public IterationRange<typename BitTable<Accessor>::const_iterator> {
 public:
  using const_iterator = typename BitTable<Accessor>::const_iterator;

  using IterationRange<const_iterator>::IterationRange;
  BitTableRange() : IterationRange<const_iterator>(const_iterator(), const_iterator()) { }

  bool empty() const { return this->begin() == this->end(); }
  size_t size() const { return this->end() - this->begin(); }

  Accessor operator[](size_t index) const {
    const_iterator it = this->begin() + index;
    DCHECK(it < this->end());
    return *it;
  }

  Accessor back() const {
    DCHECK(!empty());
    return *(this->end() - 1);
  }

  void pop_back() {
    DCHECK(!empty());
    --this->last_;
  }
};

// Helper class for encoding BitTable. It can optionally de-duplicate the inputs.
template<uint32_t kNumColumns>
class BitTableBuilderBase {
 public:
  static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
  static constexpr uint32_t kValueBias = BitTableBase<kNumColumns>::kValueBias;

  class Entry {
   public:
    Entry() {
      // The definition of kNoValue here is for host and target debug builds which complain about
      // missing a symbol definition for BitTableBase<N>::kNoValue when optimization is off.
      static constexpr uint32_t kNoValue = BitTableBase<kNumColumns>::kNoValue;
      std::fill_n(data_, kNumColumns, kNoValue);
    }

    Entry(std::initializer_list<uint32_t> values) {
      DCHECK_EQ(values.size(), kNumColumns);
      std::copy(values.begin(), values.end(), data_);
    }

    uint32_t& operator[](size_t column) {
      DCHECK_LT(column, kNumColumns);
      return data_[column];
    }

    uint32_t operator[](size_t column) const {
      DCHECK_LT(column, kNumColumns);
      return data_[column];
    }

   private:
    uint32_t data_[kNumColumns];
  };

  explicit BitTableBuilderBase(ScopedArenaAllocator* allocator)
      : rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator->Adapter(kArenaAllocBitTableBuilder)) {
  }

  Entry& operator[](size_t row) { return rows_[row]; }
  const Entry& operator[](size_t row) const { return rows_[row]; }
  const Entry& back() const { return rows_.back(); }
  size_t size() const { return rows_.size(); }

  // Append given value to the vector without de-duplication.
  // This will not add the element to the dedup map to avoid its associated costs.
  void Add(Entry value) {
    rows_.push_back(value);
  }

  // Append given list of values and return the index of the first value.
  // If the exact same set of values was already added, return the old index.
  uint32_t Dedup(Entry* values, size_t count = 1) {
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(MemoryRegion(values, sizeof(Entry) * count));

    // Check if we have already added an identical set of values.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      uint32_t index = it->second;
      if (count <= size() - index &&
          std::equal(values,
                     values + count,
                     rows_.begin() + index,
                     [](const Entry& lhs, const Entry& rhs) {
                       return memcmp(&lhs, &rhs, sizeof(Entry)) == 0;
                     })) {
        return index;
      }
    }

    // Add the set of values and add the index to the dedup map.
    uint32_t index = size();
    rows_.insert(rows_.end(), values, values + count);
    dedup_.emplace(hash, index);
    return index;
  }

  uint32_t Dedup(Entry value) {
    return Dedup(&value, /* count */ 1);
  }

  // Calculate the column bit widths based on the current data.
  void Measure(/*out*/ uint32_t* column_bits) const {
    uint32_t max_column_value[kNumColumns];
    std::fill_n(max_column_value, kNumColumns, 0);
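    // OR-ing the biased values accumulates every bit that any row uses in a column, so
    // MinimumBitsToStore of the accumulated mask equals the bit width of the largest value.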
    for (uint32_t r = 0; r < size(); r++) {
      for (uint32_t c = 0; c < kNumColumns; c++) {
        max_column_value[c] |= rows_[r][c] - kValueBias;
      }
    }
    for (uint32_t c = 0; c < kNumColumns; c++) {
      column_bits[c] = MinimumBitsToStore(max_column_value[c]);
    }
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(BitMemoryWriter<Vector>& out) const {
    size_t initial_bit_offset = out.NumberOfWrittenBits();

    // Write table header.
    std::array<uint32_t, 1 + kNumColumns> header;
    header[0] = size();
    uint32_t* column_bits = header.data() + 1;
    Measure(column_bits);
    out.WriteInterleavedVarints(header);

    // Write table data.
    for (uint32_t r = 0; r < size(); r++) {
      for (uint32_t c = 0; c < kNumColumns; c++) {
        out.WriteBits(rows_[r][c] - kValueBias, column_bits[c]);
      }
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTableBase<kNumColumns> table;
      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
      table.Decode(reader);
      DCHECK_EQ(size(), table.NumRows());
      for (uint32_t c = 0; c < kNumColumns; c++) {
        DCHECK_EQ(column_bits[c], table.NumColumnBits(c));
      }
      for (uint32_t r = 0; r < size(); r++) {
        for (uint32_t c = 0; c < kNumColumns; c++) {
          DCHECK_EQ(rows_[r][c], table.Get(r, c)) << " (" << r << ", " << c << ")";
        }
      }
    }
  }

 protected:
  ScopedArenaDeque<Entry> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
};

template<typename Accessor>
class BitTableBuilder : public BitTableBuilderBase<Accessor::kNumColumns> {
 public:
  using BitTableBuilderBase<Accessor::kNumColumns>::BitTableBuilderBase;  // Constructors.
};
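
// Illustrative sketch only: building and encoding a table. The allocator, the output buffer
// and the hypothetical MyRowAccessor are assumptions; the exact writer setup depends on the
// caller.
//
//   BitTableBuilder<MyRowAccessor> builder(&allocator);
//   uint32_t row0 = builder.Dedup({42, 1});  // One value per column; returns index 0.
//   uint32_t row1 = builder.Dedup({42, 1});  // Identical entry, deduplicated to index 0.
//   BitMemoryWriter<std::vector<uint8_t>> writer(&buffer);
//   builder.Encode(writer);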

// Helper class for encoding single-column BitTable of bitmaps (allows more than 32 bits).
class BitmapTableBuilder {
 public:
  explicit BitmapTableBuilder(ScopedArenaAllocator* const allocator)
      : allocator_(allocator),
        rows_(allocator->Adapter(kArenaAllocBitTableBuilder)),
        dedup_(8, allocator_->Adapter(kArenaAllocBitTableBuilder)) {
  }

  MemoryRegion operator[](size_t row) { return rows_[row]; }
  const MemoryRegion operator[](size_t row) const { return rows_[row]; }
  size_t size() const { return rows_.size(); }

  // Add the given bitmap to the table and return its index.
  // If the bitmap was already added it will be deduplicated.
  // The last bit must be set and any padding bits in the last byte must be zero.
  uint32_t Dedup(const void* bitmap, size_t num_bits) {
    MemoryRegion region(const_cast<void*>(bitmap), BitsToBytesRoundUp(num_bits));
    DCHECK(num_bits == 0 || BitMemoryRegion(region).LoadBit(num_bits - 1) == 1);
    DCHECK_EQ(BitMemoryRegion(region).LoadBits(num_bits, region.size_in_bits() - num_bits), 0u);
    FNVHash<MemoryRegion> hasher;
    uint32_t hash = hasher(region);

    // Check if we have already added an identical bitmap.
    auto range = dedup_.equal_range(hash);
    for (auto it = range.first; it != range.second; ++it) {
      if (MemoryRegion::ContentEquals()(region, rows_[it->second])) {
        return it->second;
      }
    }

    // Add the bitmap and add the index to the dedup map.
    uint32_t index = size();
    void* copy = allocator_->Alloc(region.size(), kArenaAllocBitTableBuilder);
    memcpy(copy, region.pointer(), region.size());
    rows_.push_back(MemoryRegion(copy, region.size()));
    dedup_.emplace(hash, index);
    max_num_bits_ = std::max(max_num_bits_, num_bits);
    return index;
  }

  // Encode the stored data into a BitTable.
  template<typename Vector>
  void Encode(BitMemoryWriter<Vector>& out) const {
    size_t initial_bit_offset = out.NumberOfWrittenBits();

    // Write table header.
    out.WriteInterleavedVarints(std::array<uint32_t, 2>{
      dchecked_integral_cast<uint32_t>(size()),
      dchecked_integral_cast<uint32_t>(max_num_bits_),
    });

    // Write table data.
    for (MemoryRegion row : rows_) {
      size_t bits_to_copy = std::min(max_num_bits_, row.size_in_bits());
      BitMemoryRegion src(row, /*bit_offset=*/ 0u, bits_to_copy);
      BitMemoryRegion dst = out.Allocate(max_num_bits_);
      dst.Subregion(/*bit_offset=*/ 0, bits_to_copy).CopyBits(src);
    }

    // Verify the written data.
    if (kIsDebugBuild) {
      BitTableBase<1> table;
      BitMemoryReader reader(out.GetWrittenRegion().Subregion(initial_bit_offset));
      table.Decode(reader);
      DCHECK_EQ(size(), table.NumRows());
      DCHECK_EQ(max_num_bits_, table.NumColumnBits(0));
      for (uint32_t r = 0; r < size(); r++) {
        BitMemoryRegion expected(rows_[r]);
        BitMemoryRegion seen = table.GetBitMemoryRegion(r);
        size_t num_bits = std::max(expected.size_in_bits(), seen.size_in_bits());
        for (size_t b = 0; b < num_bits; b++) {
          bool e = b < expected.size_in_bits() && expected.LoadBit(b);
          bool s = b < seen.size_in_bits() && seen.LoadBit(b);
          DCHECK_EQ(e, s) << " (" << r << ")[" << b << "]";
        }
      }
    }
  }

 private:
  ScopedArenaAllocator* const allocator_;
  ScopedArenaDeque<MemoryRegion> rows_;
  ScopedArenaUnorderedMultimap<uint32_t, uint32_t> dedup_;  // Hash -> row index.
  size_t max_num_bits_ = 0u;
};

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_BIT_TABLE_H_