// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

extern "C" {
#include <sys/mman.h>
}

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {

// BSD uses `MAP_ANON` instead of the Linux `MAP_ANONYMOUS`. The `MAP_ANONYMOUS`
// alias should generally be available, but is not always, so define it manually
// if necessary.
#if !defined(MAP_ANONYMOUS) && defined(MAP_ANON)
#define MAP_ANONYMOUS MAP_ANON
#endif

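// Create a managed code buffer of `capacity` bytes. Depending on the build
// configuration, the memory is allocated either with `malloc`
// (VIXL_CODE_BUFFER_MALLOC) or as an anonymous, writable `mmap` mapping
// (VIXL_CODE_BUFFER_MMAP).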
CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned; we assert that the default
  // allocator always returns word-aligned memory.
  VIXL_ASSERT(IsWordAligned(buffer_));

  cursor_ = buffer_;
}


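// Wrap an existing, caller-owned buffer. The memory is not managed: it will
// not be freed, remapped or grown by this CodeBuffer.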
CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}


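// Release the underlying memory, but only if it is managed by this
// CodeBuffer. The buffer is expected to be clean (see IsDirty()) when it is
// destroyed.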
CodeBuffer::~CodeBuffer() {
  VIXL_ASSERT(!IsDirty());
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    munmap(buffer_, capacity_);
#else
#error Unknown code buffer allocator.
#endif
  }
}


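// The protection-toggling helpers below are only compiled in for mmap-backed
// buffers (VIXL_CODE_BUFFER_MMAP). An illustrative sequence, assuming a
// managed, mmap-backed CodeBuffer named `buffer` that code has already been
// emitted into, might look like this:
//
//   buffer.SetExecutable();  // Remap as read + execute before running.
//   // ... execute the generated code ...
//   buffer.SetWritable();    // Remap as read + write before emitting again.

// Remap the buffer as readable and executable.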
#ifdef VIXL_CODE_BUFFER_MMAP
void CodeBuffer::SetExecutable() {
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
}
#endif


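// Remap the buffer as readable and writable, so that more code or data can be
// emitted.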
#ifdef VIXL_CODE_BUFFER_MMAP
void CodeBuffer::SetWritable() {
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
}
#endif


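// Copy a NUL-terminated string, including its terminator, into the buffer and
// advance the cursor past it.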
void CodeBuffer::EmitString(const char* string) {
  VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  char* null_char = stpcpy(dst, string);
  cursor_ = reinterpret_cast<byte*>(null_char) + 1;
}


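// Copy `size` bytes of raw data into the buffer and advance the cursor.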
void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}


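// Overwrite `size` bytes of previously emitted data, starting `offset` bytes
// from the start of the buffer. The updated region must end at or before the
// cursor.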
void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
  dirty_ = true;
  byte* dst = buffer_ + offset;
  VIXL_ASSERT(dst + size <= cursor_);
  memcpy(dst, data, size);
}


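// Pad with zeroed bytes until the cursor is aligned to a 4-byte boundary.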
void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(padding_size <= 4);
  EmitZeroedBytes(static_cast<int>(padding_size));
}

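// Emit `n` zeroed bytes, ensuring that the buffer has space for them first.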
void CodeBuffer::EmitZeroedBytes(int n) {
  EnsureSpaceFor(n);
  dirty_ = true;
  memset(cursor_, 0, n);
  cursor_ += n;
}

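// Discard the current contents: move the cursor back to the start of the
// buffer and mark it as clean. In debug builds, managed buffers are also
// zero-filled.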
void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


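// Grow a managed buffer to `new_capacity` bytes, preserving its contents and
// the cursor offset. VIXL_CODE_BUFFER_MALLOC builds use `realloc`;
// VIXL_CODE_BUFFER_MMAP builds use `mremap`, which is not available on Apple
// platforms.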
void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  ptrdiff_t cursor_offset = GetCursorOffset();
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
#ifdef __APPLE__
  // TODO: Avoid using VIXL_CODE_BUFFER_MMAP on Apple platforms, where mremap
  // is not available.
  // Assert on `!managed_` rather than `false` so that the compiler cannot
  // deduce that this path never returns.
  VIXL_ASSERT(!managed_);
#else
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
#endif
#else
#error Unknown code buffer allocator.
#endif

  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}


}  // namespace vixl