/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "base/arena_containers.h"
#include "base/macros.h"
#include "offsets.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/disasm-aarch64.h"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {

class Arm64InstructionSetFeatures;

namespace arm64 {

#define MEM_OP(...) vixl::aarch64::MemOperand(__VA_ARGS__)

enum LoadOperandType {
  kLoadSignedByte,
  kLoadUnsignedByte,
  kLoadSignedHalfword,
  kLoadUnsignedHalfword,
  kLoadWord,
  kLoadCoreWord,
  kLoadSWord,
  kLoadDWord
};

enum StoreOperandType {
  kStoreByte,
  kStoreHalfword,
  kStoreWord,
  kStoreCoreWord,
  kStoreSWord,
  kStoreDWord
};

class Arm64Assembler final : public Assembler {
 public:
  explicit Arm64Assembler(
      ArenaAllocator* allocator, const Arm64InstructionSetFeatures* features = nullptr);

  virtual ~Arm64Assembler() {}

  vixl::aarch64::MacroAssembler* GetVIXLAssembler() { return &vixl_masm_; }

  // Finalize the code.
  void FinalizeCode() override;

  // Size of generated code.
  size_t CodeSize() const override;
  const uint8_t* CodeBufferBaseAddress() const override;

  // Copy instructions out of assembly buffer into the given region of memory.
  void FinalizeInstructions(const MemoryRegion& region) override;

  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs);

  void SpillRegisters(vixl::aarch64::CPURegList registers, int offset);
  void UnspillRegisters(vixl::aarch64::CPURegList registers, int offset);

  // Jump to address (not setting link register).
  void JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch);

  //
  // Heap poisoning.
  //

  // Poison a heap reference contained in `reg`.
  void PoisonHeapReference(vixl::aarch64::Register reg);
  // Unpoison a heap reference contained in `reg`.
  void UnpoisonHeapReference(vixl::aarch64::Register reg);
  // Poison a heap reference contained in `reg` if heap poisoning is enabled.
  void MaybePoisonHeapReference(vixl::aarch64::Register reg);
  // Unpoison a heap reference contained in `reg` if heap poisoning is enabled.
  void MaybeUnpoisonHeapReference(vixl::aarch64::Register reg);
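
  // Illustrative sketch (an assumption about the out-of-line definitions in
  // assembler_arm64.cc, not an interface declared here): ART's heap poisoning
  // stores references negated, so poisoning and unpoisoning are both a 32-bit
  // negation and each operation is its own inverse. Roughly:
  //
  //   void Arm64Assembler::PoisonHeapReference(vixl::aarch64::Register reg) {
  //     DCHECK(reg.IsW());  // Heap references are 32-bit.
  //     vixl_masm_.Neg(reg, vixl::aarch64::Operand(reg));  // reg = -reg.
  //   }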

  // Emit code checking the status of the Marking Register, and aborting
  // the program if MR does not match the value stored in the art::Thread
  // object.
  //
  // Argument `temp` is used as a temporary register to generate code.
  // Argument `code` is used to identify the different call sites of
  // GenerateMarkingRegisterCheck and is passed to the BRK instruction.
  void GenerateMarkingRegisterCheck(vixl::aarch64::Register temp, int code = 0);

  void Bind(Label* label ATTRIBUTE_UNUSED) override {
    UNIMPLEMENTED(FATAL) << "Do not use Bind for ARM64";
  }
  void Jump(Label* label ATTRIBUTE_UNUSED) override {
    UNIMPLEMENTED(FATAL) << "Do not use Jump for ARM64";
  }

  static vixl::aarch64::Register reg_x(int code) {
    CHECK(code < kNumberOfXRegisters) << code;
    if (code == SP) {
      return vixl::aarch64::sp;
    } else if (code == XZR) {
      return vixl::aarch64::xzr;
    }
    return vixl::aarch64::Register::GetXRegFromCode(code);
  }

  static vixl::aarch64::Register reg_w(int code) {
    CHECK(code < kNumberOfWRegisters) << code;
    if (code == WSP) {
      return vixl::aarch64::wsp;
    } else if (code == WZR) {
      return vixl::aarch64::wzr;
    }
    return vixl::aarch64::Register::GetWRegFromCode(code);
  }

  static vixl::aarch64::FPRegister reg_d(int code) {
    return vixl::aarch64::FPRegister::GetDRegFromCode(code);
  }

  static vixl::aarch64::FPRegister reg_s(int code) {
    return vixl::aarch64::FPRegister::GetSRegFromCode(code);
  }

 private:
  // VIXL assembler.
  vixl::aarch64::MacroAssembler vixl_masm_;

  // Used for testing.
  friend class Arm64ManagedRegister_VixlRegisters_Test;
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_ASSEMBLER_ARM64_H_
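
// Usage sketch (illustrative only): a typical driver allocates the assembler,
// emits instructions through the wrapped VIXL macro assembler, finalizes, and
// copies the code out. ArenaPool, ArenaAllocator, and MemoryRegion are ART
// utility types declared in other headers; the instruction sequence here is
// hypothetical.
//
//   ArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   arm64::Arm64Assembler assembler(&allocator);
//   vixl::aarch64::MacroAssembler* masm = assembler.GetVIXLAssembler();
//   masm->Mov(vixl::aarch64::x0, 42);
//   masm->Ret();
//   assembler.FinalizeCode();
//   std::vector<uint8_t> buffer(assembler.CodeSize());
//   MemoryRegion region(buffer.data(), buffer.size());
//   assembler.FinalizeInstructions(region);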