/* libs/pixelflinger/codeflinger/CodeCache.cpp
**
** Copyright 2006, The Android Open Source Project
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/


#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/mman.h>

#include <cutils/log.h>
#include <cutils/atomic.h>

#include "codeflinger/CodeCache.h"

namespace android {

// ----------------------------------------------------------------------------

#if defined(__arm__)
#include <unistd.h>
#include <errno.h>
#include <string.h>  // for strerror(), used in the cacheflush error log below
#endif

// ----------------------------------------------------------------------------

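// Assembly: a reference-counted block of generated code. Its storage is
// carved out of a single shared mspace (see getMspace() below) and is
// marked executable by ensureMbaseExecutable().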
Assembly::Assembly(size_t size)
    : mCount(1), mSize(0)
{
    mBase = (uint32_t*)mspace_malloc(getMspace(), size);
    mSize = size;
    ensureMbaseExecutable();
}

Assembly::~Assembly()
{
    mspace_free(getMspace(), mBase);
}

void Assembly::incStrong(const void*) const
{
    android_atomic_inc(&mCount);
}

void Assembly::decStrong(const void*) const
{
    // android_atomic_dec() returns the previous value, so 1 means this
    // was the last reference.
    if (android_atomic_dec(&mCount) == 1) {
        delete this;
    }
}

ssize_t Assembly::size() const
{
    if (!mBase) return NO_MEMORY;
    return mSize;
}

uint32_t* Assembly::base() const
{
    return mBase;
}

ssize_t Assembly::resize(size_t newSize)
{
    mBase = (uint32_t*)mspace_realloc(getMspace(), mBase, newSize);
    mSize = newSize;
    ensureMbaseExecutable();
    return size();
}

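// All Assembly objects allocate from one shared contiguous mspace
// (2 KiB initial capacity, 1 MiB maximum).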
mspace Assembly::getMspace()
{
    static mspace msp = create_contiguous_mspace(2 * 1024, 1024 * 1024, /*locked=*/ false);
    return msp;
}

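// Makes the pages backing mBase readable, writable and executable. If
// mprotect() fails, the block is released and mBase is cleared, so size()
// will report NO_MEMORY.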
void Assembly::ensureMbaseExecutable()
{
    long pagesize = sysconf(_SC_PAGESIZE);
    long pagemask = ~(pagesize - 1);  // assumes pagesize is a power of 2

    uint32_t* pageStart = (uint32_t*) (((uintptr_t) mBase) & pagemask);
    size_t adjustedLength = (mBase - pageStart) * sizeof(uint32_t) + mSize;

    if (mBase && mprotect(pageStart, adjustedLength, PROT_READ | PROT_WRITE | PROT_EXEC) != 0) {
        mspace_free(getMspace(), mBase);
        mBase = NULL;
    }
}

// ----------------------------------------------------------------------------

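// CodeCache: an LRU cache of generated Assembly objects, keyed by
// AssemblyKeyBase and bounded by a byte budget (mCacheSize).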
CodeCache::CodeCache(size_t size)
    : mCacheSize(size), mCacheInUse(0), mWhen(0)
{
    pthread_mutex_init(&mLock, 0);
}

CodeCache::~CodeCache()
{
    pthread_mutex_destroy(&mLock);
}

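// Returns the cached Assembly for keyBase, or an empty sp<> on a miss.
// A hit refreshes the entry's LRU timestamp.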
sp<Assembly> CodeCache::lookup(const AssemblyKeyBase& keyBase) const
{
    pthread_mutex_lock(&mLock);
    sp<Assembly> r;
    ssize_t index = mCacheData.indexOfKey(key_t(keyBase));
    if (index >= 0) {
        const cache_entry_t& e = mCacheData.valueAt(index);
        e.when = mWhen++;
        r = e.entry;
    }
    pthread_mutex_unlock(&mLock);
    return r;
}

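// Inserts assembly under keyBase, evicting least-recently-used entries until
// the new entry fits within mCacheSize. On ARM, the CPU caches are then
// flushed so the freshly written instructions are visible to instruction fetch.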
int CodeCache::cache(const AssemblyKeyBase& keyBase,
                     const sp<Assembly>& assembly)
{
    pthread_mutex_lock(&mLock);

    const ssize_t assemblySize = assembly->size();
    while (mCacheInUse + assemblySize > mCacheSize) {
        // evict the LRU
        size_t lru = 0;
        size_t count = mCacheData.size();
        for (size_t i=0 ; i<count ; i++) {
            const cache_entry_t& e = mCacheData.valueAt(i);
            if (e.when < mCacheData.valueAt(lru).when) {
                lru = i;
            }
        }
        const cache_entry_t& e = mCacheData.valueAt(lru);
        mCacheInUse -= e.entry->size();
        mCacheData.removeItemsAt(lru);
    }

    ssize_t err = mCacheData.add(key_t(keyBase), cache_entry_t(assembly, mWhen));
    if (err >= 0) {
        mCacheInUse += assemblySize;
        mWhen++;
        // synchronize caches...
#if defined(__arm__)
        const long base = long(assembly->base());
        const long curr = base + long(assembly->size());
        err = cacheflush(base, curr, 0);
        ALOGE_IF(err, "__ARM_NR_cacheflush error %s\n",
                strerror(errno));
#endif
    }

    pthread_mutex_unlock(&mLock);
    return err;
}

// ----------------------------------------------------------------------------

}; // namespace android

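// ----------------------------------------------------------------------------
// Usage sketch (hypothetical caller; the key type and code-generation step
// below are illustrative only):
//
//   CodeCache cache(32 * 1024);              // byte budget for generated code
//   MyKey key(state);                        // some concrete AssemblyKeyBase subclass
//   sp<Assembly> block = cache.lookup(key);  // empty sp<> on a miss
//   if (block == 0) {
//       block = new Assembly(maxSize);
//       // ... emit instructions into block->base(), then block->resize(used)
//       cache.cache(key, block);             // may evict LRU entries, flushes caches on ARM
//   }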