// Copyright 2017, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may be
//     used to endorse or promote products derived from this software without
//     specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "code-buffer-vixl.h"
#include "utils-vixl.h"

namespace vixl {

CodeBuffer::CodeBuffer(size_t capacity)
    : buffer_(NULL),
      managed_(true),
      cursor_(NULL),
      dirty_(false),
      capacity_(capacity) {
  if (capacity_ == 0) {
    return;
  }
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = reinterpret_cast<byte*>(malloc(capacity_));
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = reinterpret_cast<byte*>(mmap(NULL,
                                         capacity,
                                         PROT_READ | PROT_WRITE,
                                         MAP_PRIVATE | MAP_ANONYMOUS,
                                         -1,
                                         0));
#else
#error Unknown code buffer allocator.
#endif
  VIXL_CHECK(buffer_ != NULL);
  // AArch64 instructions must be word aligned; we assert that the default
  // allocator always returns word-aligned memory.
  if (buffer_ != MAP_FAILED) {
    VIXL_ASSERT(IsWordAligned(buffer_));
    cursor_ = buffer_;
  }
}
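
// Illustrative only, not part of the original source: a minimal sketch of how
// a managed buffer might be created and destroyed. It assumes IsDirty() is
// publicly accessible, as the commented-out assertion in the destructor below
// suggests.
//
//   {
//     CodeBuffer buffer(4096);          // Backed by malloc or mmap, depending
//                                       // on the VIXL_CODE_BUFFER_* setting.
//     VIXL_ASSERT(!buffer.IsDirty());   // Nothing has been emitted yet.
//   }                                   // managed_ is true, so the destructor
//                                       // releases the memory.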


CodeBuffer::CodeBuffer(byte* buffer, size_t capacity)
    : buffer_(reinterpret_cast<byte*>(buffer)),
      managed_(false),
      cursor_(reinterpret_cast<byte*>(buffer)),
      dirty_(false),
      capacity_(capacity) {
  VIXL_ASSERT(buffer_ != NULL);
}
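
// Illustrative only, not part of the original source: wrapping memory the
// caller already owns. managed_ is false for this constructor, so the
// destructor leaves the storage untouched.
//
//   alignas(4) static byte scratch[1024];
//   CodeBuffer buffer(scratch, sizeof(scratch));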


CodeBuffer::~CodeBuffer() VIXL_NEGATIVE_TESTING_ALLOW_EXCEPTION {
  // VIXL_ASSERT(!IsDirty());  // Disabled: this port uses its own allocator.
  if (managed_) {
#ifdef VIXL_CODE_BUFFER_MALLOC
    free(buffer_);
#elif defined(VIXL_CODE_BUFFER_MMAP)
    if (buffer_ != MAP_FAILED) {
      // Unmapping is expected to succeed.
      [[maybe_unused]] int res = munmap(buffer_, capacity_);
      VIXL_ASSERT(res == 0);
      if ((mmap_max_ != 0) && (capacity_ > mmap_max_)) {
        // Deliberately crash: more memory was allocated than mmap_max_ allows.
        printf(" Allocated too much memory.\n");
        VIXL_UNREACHABLE();
      }
    }
#else
#error Unknown code buffer allocator.
#endif
  }
}


void CodeBuffer::SetExecutable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_EXEC);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}


void CodeBuffer::SetWritable() {
#ifdef VIXL_CODE_BUFFER_MMAP
  int ret = mprotect(buffer_, capacity_, PROT_READ | PROT_WRITE);
  VIXL_CHECK(ret == 0);
#else
  // This requires page-aligned memory blocks, which we can only guarantee with
  // mmap.
  VIXL_UNIMPLEMENTED();
#endif
}
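
// Illustrative only, not part of the original source: a sketch of the
// protection cycle these two methods enable under VIXL_CODE_BUFFER_MMAP.
// Code is emitted while the pages are writable, flipped to executable before
// it runs, and flipped back before any further emission. The names code and
// code_size stand in for caller-provided machine code.
//
//   CodeBuffer buffer(4096);
//   buffer.EmitData(code, code_size);  // Pages are PROT_READ | PROT_WRITE.
//   buffer.SetExecutable();            // Now PROT_READ | PROT_EXEC.
//   // ... call into the generated code ...
//   buffer.SetWritable();              // Back to PROT_READ | PROT_WRITE.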

// The OHOS toolchain does not provide this function, so define a local
// fallback.
#ifdef PANDA_TARGET_MOBILE
char* stpcpy(char* dst, const char* src) {
  const size_t len = strlen(src);
  return (char*)memcpy(dst, src, len + 1) + len;
}
#endif

void CodeBuffer::EmitString(const char* string) {
  VIXL_ASSERT(HasSpaceFor(strlen(string) + 1));
  char* dst = reinterpret_cast<char*>(cursor_);
  dirty_ = true;
  char* null_char = stpcpy(dst, string);
  cursor_ = reinterpret_cast<byte*>(null_char) + 1;
}
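
// Illustrative only, not part of the original source: EmitString copies the
// string together with its terminating NUL, so the cursor advances by
// strlen(string) + 1 bytes. GetCursorOffset() is assumed to be publicly
// accessible, as its use in Grow() below suggests.
//
//   ptrdiff_t before = buffer.GetCursorOffset();
//   buffer.EmitString("marker");
//   VIXL_ASSERT(buffer.GetCursorOffset() == before + 7);  // Six chars + NUL.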


void CodeBuffer::EmitData(const void* data, size_t size) {
  VIXL_ASSERT(HasSpaceFor(size));
  dirty_ = true;
  memcpy(cursor_, data, size);
  cursor_ = cursor_ + size;
}
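
// Illustrative only, not part of the original source: emitting a raw 32-bit
// value, for example a single AArch64 instruction encoding.
//
//   uint32_t nop = 0xd503201f;  // AArch64 NOP encoding.
//   buffer.EmitData(&nop, sizeof(nop));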


void CodeBuffer::UpdateData(size_t offset, const void* data, size_t size) {
  dirty_ = true;
  byte* dst = buffer_ + offset;
  VIXL_ASSERT(dst + size <= cursor_);
  memcpy(dst, data, size);
}
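
// Illustrative only, not part of the original source: UpdateData rewrites
// bytes that were already emitted (for example to patch a placeholder once a
// forward reference is resolved); unlike EmitData it does not move the cursor.
//
//   size_t patch_offset = static_cast<size_t>(buffer.GetCursorOffset());
//   uint32_t placeholder = 0;
//   buffer.EmitData(&placeholder, sizeof(placeholder));
//   // ... later, once the real value is known ...
//   uint32_t value = 0x12345678;
//   buffer.UpdateData(patch_offset, &value, sizeof(value));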


void CodeBuffer::Align() {
  byte* end = AlignUp(cursor_, 4);
  const size_t padding_size = end - cursor_;
  VIXL_ASSERT(padding_size <= 4);
  EmitZeroedBytes(static_cast<int>(padding_size));
}
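
// Illustrative only, not part of the original source: after emitting three
// bytes into a word-aligned buffer, Align() pads with a single zero byte so
// that the next emission starts on a 4-byte boundary.
//
//   buffer.EmitData("abc", 3);
//   buffer.Align();
//   VIXL_ASSERT((buffer.GetCursorOffset() % 4) == 0);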

void CodeBuffer::EmitZeroedBytes(int n) {
  EnsureSpaceFor(n);
  dirty_ = true;
  memset(cursor_, 0, n);
  cursor_ += n;
}

void CodeBuffer::Reset() {
#ifdef VIXL_DEBUG
  if (managed_) {
    // Fill with zeros (there is no useful value common to A32 and T32).
    memset(buffer_, 0, capacity_);
  }
#endif
  cursor_ = buffer_;
  SetClean();
}


void CodeBuffer::Grow(size_t new_capacity) {
  VIXL_ASSERT(managed_);
  VIXL_ASSERT(new_capacity > capacity_);
  ptrdiff_t cursor_offset = GetCursorOffset();
  // Growing is not supported with this port's allocators.
  VIXL_ASSERT(false);
#ifdef VIXL_CODE_BUFFER_MALLOC
  buffer_ = static_cast<byte*>(realloc(buffer_, new_capacity));
  VIXL_CHECK(buffer_ != NULL);
#elif defined(VIXL_CODE_BUFFER_MMAP)
  buffer_ = static_cast<byte*>(
      mremap(buffer_, capacity_, new_capacity, MREMAP_MAYMOVE));
  VIXL_CHECK(buffer_ != MAP_FAILED);
  if ((mmap_max_ != 0) && (new_capacity > mmap_max_)) {
    // Deliberately crash: more memory was allocated than mmap_max_ allows.
    printf(" Allocated too much memory.\n");
    VIXL_UNREACHABLE();
  }
#else
#error Unknown code buffer allocator.
#endif
  cursor_ = buffer_ + cursor_offset;
  capacity_ = new_capacity;
}
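
// Illustrative only, not part of the original source: when growth is allowed,
// Grow() preserves the cursor's offset even if realloc/mremap moves the
// backing memory, so stored offsets stay valid while raw pointers into the old
// buffer do not. In debug builds of this port the VIXL_ASSERT(false) above
// makes this path effectively unreachable; old_capacity is a stand-in for the
// previous capacity.
//
//   ptrdiff_t offset = buffer.GetCursorOffset();
//   buffer.Grow(2 * old_capacity);                  // buffer_ may move.
//   VIXL_ASSERT(buffer.GetCursorOffset() == offset);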


}  // namespace vixl