/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_
#define ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_

#include "arch/instruction_set.h"
#include "base/macros.h"
#include "method_info.h"
#include "quick/quick_method_frame_info.h"
#include "stack_map.h"
#include "utils.h"

namespace art {

class ArtMethod;

// OatQuickMethodHeader precedes the raw code chunk generated by the compiler.
class PACKED(4) OatQuickMethodHeader {
 public:
  OatQuickMethodHeader() = default;
  explicit OatQuickMethodHeader(uint32_t vmap_table_offset,
                                uint32_t method_info_offset,
                                uint32_t frame_size_in_bytes,
                                uint32_t core_spill_mask,
                                uint32_t fp_spill_mask,
                                uint32_t code_size);

  ~OatQuickMethodHeader();
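
  // Illustrative layout sketch: the header fields are packed directly in front
  // of the compiled code, with the flexible array member code_ marking where
  // the machine code begins.
  //
  //   +-------------------------+  <- address returned by FromCodePointer()
  //   | vmap_table_offset_      |
  //   | method_info_offset_     |
  //   | frame_info_             |
  //   | code_size_              |
  //   +-------------------------+  <- code_ (the code_ptr passed in)
  //   | compiled machine code   |
  //   +-------------------------+
  //
  // FromCodePointer() below recovers the header by subtracting
  // OFFSETOF_MEMBER(OatQuickMethodHeader, code_) from the code pointer.
  // FromEntryPoint() first converts the entry point to a code pointer via
  // EntryPointToCodePointer(), which is expected to drop the low (Thumb) bit
  // that GetEntryPoint() sets on ARM, and then does the same subtraction.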

  static OatQuickMethodHeader* FromCodePointer(const void* code_ptr) {
    uintptr_t code = reinterpret_cast<uintptr_t>(code_ptr);
    uintptr_t header = code - OFFSETOF_MEMBER(OatQuickMethodHeader, code_);
    DCHECK(IsAlignedParam(code, GetInstructionSetAlignment(kRuntimeISA)) ||
           IsAlignedParam(header, GetInstructionSetAlignment(kRuntimeISA)))
        << std::hex << code << " " << std::hex << header;
    return reinterpret_cast<OatQuickMethodHeader*>(header);
  }

  static OatQuickMethodHeader* FromEntryPoint(const void* entry_point) {
    return FromCodePointer(EntryPointToCodePointer(entry_point));
  }

  OatQuickMethodHeader(const OatQuickMethodHeader&) = default;
  OatQuickMethodHeader& operator=(const OatQuickMethodHeader&) = default;

  uintptr_t NativeQuickPcOffset(const uintptr_t pc) const {
    return pc - reinterpret_cast<uintptr_t>(GetEntryPoint());
  }

  bool IsOptimized() const {
    return GetCodeSize() != 0 && vmap_table_offset_ != 0;
  }

  const void* GetOptimizedCodeInfoPtr() const {
    DCHECK(IsOptimized());
    return reinterpret_cast<const void*>(code_ - vmap_table_offset_);
  }

  uint8_t* GetOptimizedCodeInfoPtr() {
    DCHECK(IsOptimized());
    return code_ - vmap_table_offset_;
  }

  CodeInfo GetOptimizedCodeInfo() const {
    return CodeInfo(GetOptimizedCodeInfoPtr());
  }

  const void* GetOptimizedMethodInfoPtr() const {
    DCHECK(IsOptimized());
    return reinterpret_cast<const void*>(code_ - method_info_offset_);
  }

  uint8_t* GetOptimizedMethodInfoPtr() {
    DCHECK(IsOptimized());
    return code_ - method_info_offset_;
  }

  MethodInfo GetOptimizedMethodInfo() const {
    return MethodInfo(reinterpret_cast<const uint8_t*>(GetOptimizedMethodInfoPtr()));
  }

  const uint8_t* GetCode() const {
    return code_;
  }

  uint32_t GetCodeSize() const {
    return code_size_ & kCodeSizeMask;
  }

  const uint32_t* GetCodeSizeAddr() const {
    return &code_size_;
  }

  uint32_t GetVmapTableOffset() const {
    return vmap_table_offset_;
  }

  void SetVmapTableOffset(uint32_t offset) {
    vmap_table_offset_ = offset;
  }

  const uint32_t* GetVmapTableOffsetAddr() const {
    return &vmap_table_offset_;
  }

  uint32_t GetMethodInfoOffset() const {
    return method_info_offset_;
  }

  void SetMethodInfoOffset(uint32_t offset) {
    method_info_offset_ = offset;
  }

  const uint32_t* GetMethodInfoOffsetAddr() const {
    return &method_info_offset_;
  }

  const uint8_t* GetVmapTable() const {
    CHECK(!IsOptimized()) << "Unimplemented vmap table for optimizing compiler";
    return (vmap_table_offset_ == 0) ? nullptr : code_ - vmap_table_offset_;
  }

  bool Contains(uintptr_t pc) const {
    uintptr_t code_start = reinterpret_cast<uintptr_t>(code_);
    static_assert(kRuntimeISA != kThumb2, "kThumb2 cannot be a runtime ISA");
    if (kRuntimeISA == kArm) {
      // On Thumb-2, the pc is offset by one.
      code_start++;
    }
    return code_start <= pc && pc <= (code_start + GetCodeSize());
  }

  const uint8_t* GetEntryPoint() const {
    // When the runtime architecture is ARM, `kRuntimeISA` is set to `kArm`
    // (not `kThumb2`), *but* we always generate code for the Thumb-2
    // instruction set anyway. Thumb-2 requires the entry point to be at
    // offset 1.
    static_assert(kRuntimeISA != kThumb2, "kThumb2 cannot be a runtime ISA");
    return (kRuntimeISA == kArm)
        ? reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(code_) | 1)
        : code_;
  }

  template <bool kCheckFrameSize = true>
  uint32_t GetFrameSizeInBytes() const {
    uint32_t result = frame_info_.FrameSizeInBytes();
    if (kCheckFrameSize) {
      DCHECK_ALIGNED(result, kStackAlignment);
    }
    return result;
  }

  QuickMethodFrameInfo GetFrameInfo() const {
    return frame_info_;
  }

  uintptr_t ToNativeQuickPc(ArtMethod* method,
                            const uint32_t dex_pc,
                            bool is_for_catch_handler,
                            bool abort_on_failure = true) const;

  uint32_t ToDexPc(ArtMethod* method, const uintptr_t pc, bool abort_on_failure = true) const;

  void SetHasShouldDeoptimizeFlag() {
    DCHECK_EQ(code_size_ & kShouldDeoptimizeMask, 0u);
    code_size_ |= kShouldDeoptimizeMask;
  }

  bool HasShouldDeoptimizeFlag() const {
    return (code_size_ & kShouldDeoptimizeMask) != 0;
  }
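
  // Illustrative usage sketch (the local variables below are hypothetical;
  // only the member functions declared above are from this class): a caller
  // holding a quick entry point would typically locate the header and its
  // metadata roughly like this:
  //
  //   const void* entry_point = ...;  // e.g. taken from an ArtMethod
  //   OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
  //   if (header->Contains(pc) && header->IsOptimized()) {
  //     CodeInfo code_info = header->GetOptimizedCodeInfo();
  //     MethodInfo method_info = header->GetOptimizedMethodInfo();
  //     uint32_t dex_pc = header->ToDexPc(method, pc);
  //   }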

 private:
  static constexpr uint32_t kShouldDeoptimizeMask = 0x80000000;
  static constexpr uint32_t kCodeSizeMask = ~kShouldDeoptimizeMask;

  // The offset in bytes from the start of the vmap table to the end of the header.
  uint32_t vmap_table_offset_ = 0u;
  // The offset in bytes from the start of the method info to the end of the header.
  // The method info offset is not in the CodeInfo since CodeInfo has good dedupe properties that
  // would be lost from doing so. The method info memory region contains method indices since they
  // are hard to dedupe.
  uint32_t method_info_offset_ = 0u;
  // The stack frame information.
  QuickMethodFrameInfo frame_info_;
  // The code size in bytes. The highest bit signifies whether the compiled
  // code behind this method header has the should_deoptimize flag set.
  uint32_t code_size_ = 0u;
  // The actual code.
  uint8_t code_[0];
};

}  // namespace art

#endif  // ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_