/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_
#define ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_

#include "arch/instruction_set.h"
#include "base/locks.h"
#include "base/macros.h"
#include "base/utils.h"
#include "quick/quick_method_frame_info.h"
#include "stack_map.h"

namespace art {

class ArtMethod;

// OatQuickMethodHeader precedes the raw code chunk generated by the compiler.
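// The layout implied by the accessors below: the header sits immediately before the
// machine code at `code_`, and, for optimized code, the CodeInfo can be reached by
// walking back `GetCodeInfoOffset()` bytes from `code_`:
//
//   ... [CodeInfo] ... [OatQuickMethodHeader][machine code]
//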
class PACKED(4) OatQuickMethodHeader {
 public:
  OatQuickMethodHeader(uint32_t code_info_offset = 0) {
    SetCodeInfoOffset(code_info_offset);
  }

  static OatQuickMethodHeader* NterpMethodHeader;

  bool IsNterpMethodHeader() const;

  static bool IsNterpPc(uintptr_t pc) {
    return OatQuickMethodHeader::NterpMethodHeader != nullptr &&
        OatQuickMethodHeader::NterpMethodHeader->Contains(pc);
  }

  static OatQuickMethodHeader* FromCodePointer(const void* code_ptr) {
    uintptr_t code = reinterpret_cast<uintptr_t>(code_ptr);
    uintptr_t header = code - OFFSETOF_MEMBER(OatQuickMethodHeader, code_);
    DCHECK(IsAlignedParam(code, GetInstructionSetAlignment(kRuntimeISA)) ||
           IsAlignedParam(header, GetInstructionSetAlignment(kRuntimeISA)))
        << std::hex << code << " " << std::hex << header;
    return reinterpret_cast<OatQuickMethodHeader*>(header);
  }

  static OatQuickMethodHeader* FromEntryPoint(const void* entry_point) {
    return FromCodePointer(EntryPointToCodePointer(entry_point));
  }

  static size_t InstructionAlignedSize() {
    return RoundUp(sizeof(OatQuickMethodHeader), GetInstructionSetAlignment(kRuntimeISA));
  }

  OatQuickMethodHeader(const OatQuickMethodHeader&) = default;
  OatQuickMethodHeader& operator=(const OatQuickMethodHeader&) = default;

  uintptr_t NativeQuickPcOffset(const uintptr_t pc) const {
    return pc - reinterpret_cast<uintptr_t>(GetEntryPoint());
  }

  ALWAYS_INLINE bool IsOptimized() const {
    uintptr_t code = reinterpret_cast<uintptr_t>(code_);
    DCHECK_NE(data_, 0u) << std::hex << code;            // Probably padding of native code.
    DCHECK_NE(data_, kInvalidData) << std::hex << code;  // Probably a stub or trampoline.
    return (data_ & kIsCodeInfoMask) != 0;
  }

  ALWAYS_INLINE const uint8_t* GetOptimizedCodeInfoPtr() const {
    uint32_t offset = GetCodeInfoOffset();
    DCHECK_NE(offset, 0u);
    return code_ - offset;
  }

  ALWAYS_INLINE uint8_t* GetOptimizedCodeInfoPtr() {
    uint32_t offset = GetCodeInfoOffset();
    DCHECK_NE(offset, 0u);
    return code_ - offset;
  }

  ALWAYS_INLINE const uint8_t* GetCode() const {
    return code_;
  }

  ALWAYS_INLINE uint32_t GetCodeSize() const {
    return LIKELY(IsOptimized())
        ? CodeInfo::DecodeCodeSize(GetOptimizedCodeInfoPtr())
        : (data_ & kCodeSizeMask);
  }

  ALWAYS_INLINE uint32_t GetCodeInfoOffset() const {
    DCHECK(IsOptimized());
    return data_ & kCodeInfoMask;
  }

  void SetCodeInfoOffset(uint32_t offset) {
    data_ = kIsCodeInfoMask | offset;
    DCHECK_EQ(GetCodeInfoOffset(), offset);
  }

  bool Contains(uintptr_t pc) const {
    // We should not call `Contains` on a stub or trampoline.
    DCHECK_NE(data_, kInvalidData) << std::hex << reinterpret_cast<uintptr_t>(code_);
    // Remove hwasan tag to make comparison below valid. The PC from the stack does not have it.
    uintptr_t code_start = reinterpret_cast<uintptr_t>(HWASanUntag(code_));
    static_assert(kRuntimeISA != InstructionSet::kThumb2, "kThumb2 cannot be a runtime ISA");
    if (kRuntimeISA == InstructionSet::kArm) {
      // On Thumb-2, the pc is offset by one.
      code_start++;
    }
    return code_start <= pc && pc <= (code_start + GetCodeSize());
  }

  const uint8_t* GetEntryPoint() const {
    // When the runtime architecture is ARM, `kRuntimeISA` is set to `kArm`
    // (not `kThumb2`), *but* we always generate code for the Thumb-2
    // instruction set anyway. Thumb-2 requires the entry point to be at
    // offset 1 (i.e. with the low bit set).
    static_assert(kRuntimeISA != InstructionSet::kThumb2, "kThumb2 cannot be a runtime ISA");
    return (kRuntimeISA == InstructionSet::kArm)
        ? reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(code_) | 1)
        : code_;
  }

  template <bool kCheckFrameSize = true>
  uint32_t GetFrameSizeInBytes() const {
    uint32_t result = GetFrameInfo().FrameSizeInBytes();
    if (kCheckFrameSize) {
      DCHECK_ALIGNED(result, kStackAlignment);
    }
    return result;
  }

  QuickMethodFrameInfo GetFrameInfo() const {
    DCHECK(IsOptimized());
    return CodeInfo::DecodeFrameInfo(GetOptimizedCodeInfoPtr());
  }

  uintptr_t ToNativeQuickPc(ArtMethod* method,
                            const uint32_t dex_pc,
                            bool is_for_catch_handler,
                            bool abort_on_failure = true) const;

  uint32_t ToDexPc(ArtMethod** frame,
                   const uintptr_t pc,
                   bool abort_on_failure = true) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SetHasShouldDeoptimizeFlag() {
    DCHECK(!HasShouldDeoptimizeFlag());
    data_ |= kShouldDeoptimizeMask;
  }

  bool HasShouldDeoptimizeFlag() const {
    return (data_ & kShouldDeoptimizeMask) != 0;
  }

 private:
  static constexpr uint32_t kShouldDeoptimizeMask = 0x80000000;
  static constexpr uint32_t kIsCodeInfoMask       = 0x40000000;
  static constexpr uint32_t kCodeInfoMask         = 0x3FFFFFFF;  // If kIsCodeInfoMask is set.
  static constexpr uint32_t kCodeSizeMask         = 0x3FFFFFFF;  // If kIsCodeInfoMask is clear.
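
  // Layout of `data_` under the masks above (see the accessors earlier in the class):
  //   bit  31     : "should deoptimize" flag.
  //   bit  30     : set when bits [29:0] hold the CodeInfo offset (optimized code);
  //                 clear when they hold the code size directly.
  //   bits [29:0] : CodeInfo offset or code size, depending on bit 30.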

  // To avoid confusing a stub with Java-generated code, we prefix each
  // stub with a 0xFFFFFFFF marker.
  static constexpr uint32_t kInvalidData = 0xFFFFFFFF;

  uint32_t data_ = 0u;  // Combination of fields using the above masks.
  uint8_t code_[0];     // The actual method code.
};
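
// Minimal usage sketch, assuming a valid quick entry point (e.g. one taken from an
// ArtMethod) and a raw `pc` value; both `entry_point` and `pc` are placeholders here:
//
//   OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
//   uint32_t code_size = header->GetCodeSize();  // Size of the compiled code in bytes.
//   bool covers_pc = header->Contains(pc);       // Whether `pc` falls inside this method's code.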

}  // namespace art

#endif  // ART_RUNTIME_OAT_QUICK_METHOD_HEADER_H_