// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

extern "C" {
#include <sys/mman.h>
}

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {

// BSD uses `MAP_ANON` instead of the Linux `MAP_ANONYMOUS`. The `MAP_ANONYMOUS`
// alias should generally be available, but is not always, so define it manually
// if necessary.
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS MAP_ANON
#endif

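// Allocate a managed buffer of `capacity` bytes. Depending on the build
// configuration, the memory comes either from malloc (VIXL_CODE_BUFFER_MALLOC)
// or from a private, anonymous mmap mapping (VIXL_CODE_BUFFER_MMAP); in both
// cases it is released by the destructor.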
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned. We assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}


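// Wrap an existing, caller-owned buffer. The buffer is not managed: the
// destructor will not free it, and Grow() must not be called on it.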
CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}


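// Managed buffers are released with the allocator that created them. The
// destructor asserts that the buffer is not dirty; callers are expected to
// have cleared the dirty flag (for example via SetClean()) once emission is
// finished.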
CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}


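// Make the buffer's pages read-execute. This is only supported for mmap-backed
// buffers, since mprotect() operates on page-granular mappings.
//
// A minimal usage sketch (an assumption-laden illustration: it presumes
// VIXL_CODE_BUFFER_MMAP and that SetClean() is accessible to the caller; the
// word emitted below is placeholder data, not a real instruction stream):
//
//   CodeBuffer buffer(4096);
//   uint32_t word = 0xd503201f;
//   buffer.EmitData(&word, sizeof(word));  // marks the buffer dirty
//   buffer.SetExecutable();                // now PROT_READ | PROT_EXEC
//   // ... run or inspect the generated code ...
//   buffer.SetWritable();                  // back to PROT_READ | PROT_WRITE
//   buffer.SetClean();                     // the destructor asserts !IsDirty()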
void CodeBuffer::SetExecutable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}


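// Switch the buffer's pages back to read-write so that more code can be
// emitted. As with SetExecutable(), this relies on the page-aligned mmap
// allocation.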
void CodeBuffer::SetWritable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}


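// Copy `string`, including its NUL terminator, to the cursor and advance the
// cursor past the terminator. The caller must have reserved enough space.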
void CodeBuffer::EmitString(const char* string) {
  VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  char* null_char = stpcpy(dst, string);
  cursor_ = reinterpret_cast<byte*>(null_char) + 1;
}


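// Copy `size` bytes of arbitrary data to the cursor and advance the cursor.
// The caller must have reserved enough space.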
void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}


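// Overwrite `size` bytes at `offset` from the start of the buffer, for example
// to patch previously emitted code. The patched range must lie within the
// region that has already been emitted.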
void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
  dirty_ = true;
  byte* dst = buffer_ + offset;
  VIXL_ASSERT(dst + size <= cursor_);
  memcpy(dst, data, size);
}


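// Pad the buffer with zero bytes until the cursor is 32-bit aligned.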
void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(padding_size <= 4);
  EmitZeroedBytes(static_cast<int>(padding_size));
}

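// Emit `n` zero bytes at the cursor. Unlike the Emit* helpers above, this
// calls EnsureSpaceFor() rather than asserting that space is available.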
void CodeBuffer::EmitZeroedBytes(int n) {
  EnsureSpaceFor(n);
  dirty_ = true;
  memset(cursor_, 0, n);
  cursor_ += n;
}

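// Rewind the cursor to the start of the buffer and clear the dirty flag. In
// debug builds, managed buffers are also zero-filled.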
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


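// Enlarge a managed buffer to `new_capacity` bytes, preserving its contents
// and the cursor offset. malloc-backed buffers are grown with realloc();
// mmap-backed buffers are grown with mremap(), possibly moving the mapping.
// mremap is not available on macOS, so growing an mmap-backed buffer is not
// supported there.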
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
#ifdef __APPLE__
  // TODO: Avoid using VIXL_CODE_BUFFER_MMAP. mremap is not available on macOS,
  // so growing an mmap-backed buffer is not supported there. Assert with
  // `!managed_` (which is always false here) rather than a literal `false`, so
  // that the compiler does not deduce that the function never returns.
  VIXL_ASSERT(!managed_);
#else
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#endif
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}


}  // namespace vixl