/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_allocator.h"
#include "trace/trace.h"

namespace panda {

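// Code buffers are allocated with page alignment so that memory protection can be
// applied to each buffer as a whole.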
const Alignment CodeAllocator::PAGE_LOG_ALIGN = GetLogAlignment(os::mem::GetPageSize());
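
// The backing arena lives in the code space. It is created inside a lambda so that
// its construction can be traced; allocation statistics are recorded here via memStats_.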
22
CodeAllocator::CodeAllocator(BaseMemStats *mem_stats)
    : arenaAllocator_([&]() {
          trace::ScopedTrace scoped_trace(__PRETTY_FUNCTION__);
          // Do not set up mem_stats in the internal arena allocator, because memstats are managed here.
          return ArenaAllocator(SpaceType::SPACE_TYPE_CODE, nullptr);
      }()),
      memStats_(mem_stats),
      codeRangeStart_(nullptr),
      codeRangeEnd_(nullptr)
{
    ASSERT(LOG_ALIGN_MIN <= PAGE_LOG_ALIGN && PAGE_LOG_ALIGN <= LOG_ALIGN_MAX);
}

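// The code buffers themselves are released together with arenaAllocator_;
// only the tracked code range needs to be reset here.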
CodeAllocator::~CodeAllocator()
{
    codeRangeStart_ = nullptr;
    codeRangeEnd_ = nullptr;
}

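// Copies the compiled code into a fresh page-aligned buffer and seals it as
// read-only executable memory. Returns nullptr if the allocation or the bounded
// copy fails.
//
// Usage sketch (`stats` and `jit_code` are illustrative names, not part of this file):
//   CodeAllocator allocator(&stats);
//   void *entry = allocator.AllocateCode(jit_code.size(), jit_code.data());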
void *CodeAllocator::AllocateCode(size_t size, const void *code_buff)
{
    trace::ScopedTrace scoped_trace("Allocate Code");
    void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
    if (UNLIKELY(code_ptr == nullptr || memcpy_s(code_ptr, size, code_buff, size) != EOK)) {
        return nullptr;
    }
    ProtectCode(os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size));
    memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
    CodeRangeUpdate(code_ptr, size);
    return code_ptr;
}

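// Allocates a writable code buffer without applying protection. The caller fills
// the returned range and is expected to pass it to ProtectCode() before execution.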
os::mem::MapRange<std::byte> CodeAllocator::AllocateCodeUnprotected(size_t size)
{
    trace::ScopedTrace scoped_trace("Allocate Code");
    void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
    if (UNLIKELY(code_ptr == nullptr)) {
        return os::mem::MapRange<std::byte>(nullptr, 0);
    }
    memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
    CodeRangeUpdate(code_ptr, size);
    return os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size);
}

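// Switches the mapped range to read + execute, so the buffer can no longer be
// written once the code in it is ready to run.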
/* static */
void CodeAllocator::ProtectCode(os::mem::MapRange<std::byte> mem_range)
{
    mem_range.MakeReadExec();
}

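// Returns true if pc lies within the overall range of allocated code. The range is
// tracked as a single [start, end] interval, so this is a fast but conservative
// check: it may also cover gaps between individual buffers.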
bool CodeAllocator::InAllocatedCodeRange(const void *pc)
{
    os::memory::ReadLockHolder rlock(code_range_lock_);
    return (pc >= codeRangeStart_) && (pc <= codeRangeEnd_);
}

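// Extends the tracked code range so that it covers the new buffer [ptr, ptr + size).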
void CodeAllocator::CodeRangeUpdate(void *ptr, size_t size)
{
    os::memory::WriteLockHolder rwlock(code_range_lock_);
    // Check for nullptr first to avoid an ordered comparison against a null pointer.
    if (codeRangeStart_ == nullptr || ptr < codeRangeStart_) {
        codeRangeStart_ = ptr;
    }
    void *buffer_end = ToVoidPtr(ToUintPtr(ptr) + size);
    if (codeRangeEnd_ == nullptr || buffer_end > codeRangeEnd_) {
        codeRangeEnd_ = buffer_end;
    }
}

}  // namespace panda