/*
 * Copyright (C) 2021 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_BASE_MRUCACHE_
#define ANDROID_BASE_MRUCACHE_

#include <algorithm>
#include <cstddef>
#include <list>
#include <map>
#include <type_traits>

namespace android {
namespace base {

// A trivial MRU cache. Not thread-safe.
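//
// Usage sketch (illustrative only, not part of this header; the std::string
// instantiation, the entry count of 64, and the null CacheFlattener are
// assumptions made for the example):
//
//   MruCache<std::string, std::string> cache(/*maxEntries=*/64,
//                                            /*cacheFlattener=*/nullptr);
//
//   std::string key = "key";
//   std::string value = "value";
//   const size_t valueSize = value.size();
//   cache.put(key, key.size(), std::move(value), valueSize);
//
//   const std::string* found = nullptr;
//   if (cache.get(key, &found)) {
//       // *found points at the cached copy; the entry is now the most
//       // recently used one.
//   }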
template <class K, class V>
class MruCache {
public:
    // Wraps a key or a value together with a caller-supplied size. Ordering
    // and equality consider only the wrapped data, not the size.
    template <class S>
    struct EntryWithSize {
        EntryWithSize(const S&& d) :
            EntryWithSize(std::move(d), 0) {}

        EntryWithSize(const S&& d, const size_t ds) :
            data(std::move(d)),
            dataSize(ds) {
            static_assert(std::is_same<S, K>::value ||
                std::is_same<S, V>::value, "Cache entry instantiated with invalid types");
        }

        const S data;
        size_t dataSize;

        bool operator==(const EntryWithSize& rhs) const { return data == rhs.data; }
        bool operator<(const EntryWithSize& rhs) const { return data < rhs.data; }
    };

    // Observer interface; cacheChanged() is invoked after each successful put().
    class MruCacheObserver {
    public:
        virtual void cacheChanged() = 0;
        virtual ~MruCacheObserver() {}
    };

    // Hook invoked by flatten() with the cache contents and a caller-provided
    // buffer.
    class CacheFlattener {
    public:
        virtual void handleFlatten(std::map<EntryWithSize<K>, EntryWithSize<V>>& mCache, void* buf, size_t bufSize) = 0;
        virtual ~CacheFlattener() {}
    };
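
    // Illustrative sketch (not part of this header): a minimal observer that
    // counts change notifications. CountingObserver and the std::string
    // instantiation are hypothetical; a client would register it with
    // setObserver().
    //
    //   struct CountingObserver
    //           : MruCache<std::string, std::string>::MruCacheObserver {
    //       int changes = 0;
    //       void cacheChanged() override { ++changes; }
    //   };
    //
    //   CountingObserver observer;
    //   cache.setObserver(&observer);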

    MruCache(size_t maxEntries, CacheFlattener* cacheFlattener) :
        mCacheObserver(nullptr),
        mCacheFlattener(cacheFlattener),
        mMaxEntries(maxEntries) {}

    // Inserts or refreshes an entry and marks it as most recently used.
    // Returns whether the insertion into the underlying map succeeded.
    bool put(const K& key, size_t keySize, V&& value, size_t valueSize) {
        evictIfNecessary();

        EntryWithSize<K> cacheKey(std::move(key), keySize);
        EntryWithSize<V> cacheValue(std::move(value), valueSize);

        auto exists = mCache.find(cacheKey);
        if (exists != mCache.end()) {
            // Existing key: move it to the front of the recency list and
            // drop the old mapping before reinserting the new value.
            auto iter = std::find(mMostRecents.begin(), mMostRecents.end(), cacheKey);
            mMostRecents.splice(mMostRecents.begin(), mMostRecents, iter);
            mCache.erase(exists);
        } else {
            mMostRecents.push_front(cacheKey);
        }

        const auto [_, res] = mCache.insert({ cacheKey, std::move(cacheValue) });

        if (mCacheObserver != nullptr && res) {
            mCacheObserver->cacheChanged();
        }

        return res;
    }

    // Hands the current cache contents to the CacheFlattener, if one was
    // provided at construction time.
    void flatten(void* buf, size_t bufSize) {
        if (mCacheFlattener) {
            mCacheFlattener->handleFlatten(mCache, buf, bufSize);
        }
    }

    // Looks up key. On a hit, stores a pointer to the cached value in *value,
    // marks the entry as most recently used, and returns true.
    bool get(const K& key, const V** value) {
        EntryWithSize<K> cacheKey(std::move(key));
        auto res = mCache.find(cacheKey);

        if (res == mCache.end()) {
            return false;
        }

        *value = &(res->second.data);
        auto iter = std::find(mMostRecents.begin(), mMostRecents.end(), cacheKey);
        mMostRecents.splice(mMostRecents.begin(), mMostRecents, iter);

        return true;
    }

    void setObserver(MruCacheObserver* observer) {
        mCacheObserver = observer;
    }

private:
    using MruCacheMap = std::map<EntryWithSize<K>, EntryWithSize<V>>;
    using MostRecentList = std::list<EntryWithSize<K>>;

    // Once the cache has reached mMaxEntries, drop entries from the front of
    // the recency list until the size falls back to roughly 90% of the limit.
    void evictIfNecessary() {
        auto entryCount = mMostRecents.size();
        if (entryCount >= mMaxEntries) {
            auto threshold = entryCount * 0.9;

            for (int i = mMostRecents.size(); i > threshold; i--) {
                EntryWithSize<K> key = mMostRecents.front();
                mMostRecents.pop_front();
                mCache.erase(key);
            }
        }
    }

    MruCacheMap mCache;
    MostRecentList mMostRecents;
    MruCacheObserver* mCacheObserver;
    CacheFlattener* mCacheFlattener;
    const size_t mMaxEntries;
};

}  // namespace base
}  // namespace android

#endif  // ANDROID_BASE_MRUCACHE_