// Copyright 2020, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#ifndef VIXL_EXAMPLE_EXECUTABLE_MEMORY_H_
#define VIXL_EXAMPLE_EXECUTABLE_MEMORY_H_

extern "C" {
#include <stdint.h>
#ifndef VIXL_INCLUDE_SIMULATOR_AARCH64
#include <sys/mman.h>
#endif
}

#include <cstdio>
#include <cstring>
#include <string>

#include "aarch64/assembler-aarch64.h"
#include "aarch64/constants-aarch64.h"
#include "aarch64/cpu-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"

#ifndef VIXL_INCLUDE_SIMULATOR_AARCH64
// Copies a block of generated code into a freshly mmap'd buffer. The buffer
// is mapped read-write for the copy, then switched to read-execute before it
// can be called, so it is never writable and executable at the same time.
class ExecutableMemory {
 public:
  ExecutableMemory(const vixl::byte* code_start, size_t size)
      : size_(size),
        buffer_(reinterpret_cast<vixl::byte*>(mmap(NULL,
                                                   size,
                                                   PROT_READ | PROT_WRITE,
                                                   MAP_SHARED | MAP_ANONYMOUS,
                                                   -1,
                                                   0))) {
    VIXL_CHECK(buffer_ != MAP_FAILED);
    memcpy(buffer_, code_start, size_);

    // Make sure the copied instructions are visible to the instruction cache
    // before the buffer is made executable.
    vixl::aarch64::CPU::EnsureIAndDCacheCoherency(buffer_, size_);
    int res = mprotect(buffer_, size_, PROT_READ | PROT_EXEC);
    VIXL_CHECK(res == 0);
  }
  ~ExecutableMemory() { munmap(buffer_, size_); }

  // Return a callable pointer of type T to the code bound at `entry_point`.
  template <typename T>
  T GetEntryPoint(const vixl::aarch64::Label& entry_point) const {
    int64_t location = entry_point.GetLocation();
    return GetOffsetAddress<T>(location);
  }

 private:
  template <typename T>
  T GetOffsetAddress(int64_t offset) const {
    VIXL_ASSERT((offset >= 0) && (static_cast<size_t>(offset) <= size_));
    T function_address;
    vixl::byte* buffer_address = buffer_ + offset;

    // Convert the data pointer into a function pointer via memcpy, avoiding
    // a direct cast between object and function pointer types.
    VIXL_STATIC_ASSERT(sizeof(T) == sizeof(buffer_address));
    memcpy(&function_address, &buffer_address, sizeof(T));
    return function_address;
  }

  size_t size_;
  vixl::byte* buffer_;
};
#endif

#endif  // VIXL_EXAMPLE_EXECUTABLE_MEMORY_H_
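
// A minimal usage sketch (not part of the original header), assuming a
// non-simulator build and a vixl::aarch64::MacroAssembler `masm` that has
// generated code containing a bound Label `entry`. The function type
// `double (*)(double, double)` is only an illustrative assumption.
//
//   masm.FinalizeCode();
//   vixl::byte* code = masm.GetBuffer()->GetStartAddress<vixl::byte*>();
//   size_t code_size = masm.GetSizeOfCodeGenerated();
//   ExecutableMemory memory(code, code_size);
//   double (*fn)(double, double) =
//       memory.GetEntryPoint<double (*)(double, double)>(entry);
//   double result = fn(1.0, 2.0);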