// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of ARM Limited nor the names of its contributors may be
// used to endorse or promote products derived from this software without
// specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
26
27 #ifdef VIXL_CODE_BUFFER_MMAP
28 extern "C" {
29 #include <sys/mman.h>
30 }
31 #endif
32
33 #include "code-buffer-vixl.h"
34 #include "utils-vixl.h"
35
36 namespace vixl {
37
38 // BSD uses `MAP_ANON` instead of the Linux `MAP_ANONYMOUS`. The `MAP_ANONYMOUS`
39 // alias should generally be available, but is not always, so define it manually
40 // if necessary.
41 #if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
42 #define MAP_ANONYMOUS MAP_ANON
43 #endif
44
CodeBuffer(size_t capacity)45 CodeBuffer::CodeBuffer(size_t capacity)
46 : buffer_(NULL),
47 managed_(true),
48 cursor_(NULL),
49 dirty_(false),
50 capacity_(capacity) {
51 if (capacity_ == 0) {
52 return;
53 }
54 #ifdef VIXL_CODE_BUFFER_MALLOC
55 buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
56 #elif defined(VIXL_CODE_BUFFER_MMAP)
57 buffer_ = reinterpret_cast<byte*>(mmap(NULL,
58 capacity,
59 PROT_READ | PROT_WRITE,
60 MAP_PRIVATE | MAP_ANONYMOUS,
61 -1,
62 0));
63 #else
64 #error Unknown code buffer allocator.
65 #endif
66 VIXL_CHECK(buffer_ != NULL);
67 // Aarch64 instructions must be word aligned, we assert the default allocator
68 // always returns word align memory.
69 VIXL_ASSERT(IsWordAligned(buffer_));
70
71 cursor_ = buffer_;
72 }
73
74
CodeBuffer(byte * buffer,size_t capacity)75 CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
76 : buffer_(reinterpret_cast<byte*>(buffer)),
77 managed_(false),
78 cursor_(reinterpret_cast<byte*>(buffer)),
79 dirty_(false),
80 capacity_(capacity) {
81 VIXL_ASSERT(buffer_ != NULL);
82 }
83
84
// Release the backing store if it is owned by this CodeBuffer.
CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  // Destroying a dirty buffer suggests generated code was never finalized.
  VIXL_ASSERT(!IsDirty());
  // Externally provided buffers (managed_ == false) belong to the caller and
  // must not be freed here.
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}
97
98
SetExecutable()99 void CodeBuffer::SetExecutable() {
100 #ifdef VIXL_CODE_BUFFER_MMAP
101 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
102 VIXL_CHECK(ret == 0);
103 #else
104 // This requires page-aligned memory blocks, which we can only guarantee with
105 // mmap.
106 VIXL_UNIMPLEMENTED();
107 #endif
108 }
109
110
SetWritable()111 void CodeBuffer::SetWritable() {
112 #ifdef VIXL_CODE_BUFFER_MMAP
113 int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
114 VIXL_CHECK(ret == 0);
115 #else
116 // This requires page-aligned memory blocks, which we can only guarantee with
117 // mmap.
118 VIXL_UNIMPLEMENTED();
119 #endif
120 }
121
122
EmitString(const char * string)123 void CodeBuffer::EmitString(const char* string) {
124 const auto len = strlen(string) + 1;
125 VIXL_ASSERT(HasSpaceFor(len));
126 char* dst = reinterpret_cast<char*>(cursor_);
127 dirty_ = true;
128 memcpy(dst, string, len);
129 cursor_ = reinterpret_cast<byte*>(dst + len);
130 }
131
132
EmitData(const void * data,size_t size)133 void CodeBuffer::EmitData(const void* data, size_t size) {
134 VIXL_ASSERT(HasSpaceFor(size));
135 dirty_ = true;
136 memcpy(cursor_, data, size);
137 cursor_ = cursor_ + size;
138 }
139
140
UpdateData(size_t offset,const void * data,size_t size)141 void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
142 dirty_ = true;
143 byte* dst = buffer_ + offset;
144 VIXL_ASSERT(dst + size <= cursor_);
145 memcpy(dst, data, size);
146 }
147
148
Align()149 void CodeBuffer::Align() {
150 byte* end = AlignUp(cursor_, 4);
151 const size_t padding_size = end - cursor_;
152 VIXL_ASSERT(padding_size <= 4);
153 EmitZeroedBytes(static_cast<int>(padding_size));
154 }
155
EmitZeroedBytes(int n)156 void CodeBuffer::EmitZeroedBytes(int n) {
157 EnsureSpaceFor(n);
158 dirty_ = true;
159 memset(cursor_, 0, n);
160 cursor_ += n;
161 }
162
// Rewind the cursor to the start of the buffer and mark it clean; the
// capacity (and, for managed buffers, the allocation) is kept.
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}
173
174
// Grow a managed buffer to `new_capacity` bytes, preserving its contents and
// the cursor offset. The buffer may move; callers must not hold raw pointers
// into it across this call.
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  // Remember the cursor as an offset, since the base address may change.
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
#ifdef __APPLE__
  // mremap is not available on Darwin, so growing an mmap-backed buffer is
  // not supported there.
  // TODO: Avoid using VIXL_CODE_BUFFER_MMAP.
  // Don't use false to avoid having the compiler realize it's a noreturn
  // method.
  VIXL_ASSERT(!managed_);  // Always fails here: managed_ was asserted above.
#else
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  // Note: mremap failure is MAP_FAILED, not NULL.
  VIXL_CHECK(buffer_ != MAP_FAILED);
#endif
#else
#error Unknown code buffer allocator.
#endif

  // Re-derive the cursor from the (possibly relocated) base address.
  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}
200
201
202 } // namespace vixl
203