/**
 * Copyright (c) 2021-2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_allocator.h"
#include "mem/base_mem_stats.h"
#include "os/mem.h"
#include "trace/trace.h"

#include <securec.h>
#include <cstring>

namespace panda {

const Alignment CodeAllocator::PAGE_LOG_ALIGN = GetLogAlignment(os::mem::GetPageSize());

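// The arena allocator is built through an immediately-invoked lambda so that
// its construction shows up as a scoped trace event.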
CodeAllocator::CodeAllocator(BaseMemStats *mem_stats)
    : arenaAllocator_([&]() {
        trace::ScopedTrace scoped_trace(__PRETTY_FUNCTION__);
        // Do not set up mem_stats in the internal arena allocator; memstats are managed by this class.
        return ArenaAllocator(SpaceType::SPACE_TYPE_CODE, nullptr);
    }()),
    memStats_(mem_stats),
    codeRangeStart_(nullptr),
    codeRangeEnd_(nullptr)
{
    ASSERT(LOG_ALIGN_MIN <= PAGE_LOG_ALIGN && PAGE_LOG_ALIGN <= LOG_ALIGN_MAX);
}

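// arenaAllocator_ releases the underlying code memory in its own destructor;
// only the cached range pointers are reset here.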
CodeAllocator::~CodeAllocator()
{
    codeRangeStart_ = nullptr;
    codeRangeEnd_ = nullptr;
}

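// Copies `size` bytes from `code_buff` into a freshly allocated, page-aligned
// buffer and flips it to read-execute. Page alignment matters because memory
// protection operates at page granularity. Returns nullptr if the allocation
// or the copy fails.
//
// Illustrative usage (a sketch; `compiled_buf` and `compiled_size` are
// hypothetical names for a finished compilation result):
//
//     CodeAllocator allocator(mem_stats);
//     void *entry = allocator.AllocateCode(compiled_size, compiled_buf);
//     if (entry != nullptr) {
//         // `entry` now points to executable, non-writable code.
//     }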
void *CodeAllocator::AllocateCode(size_t size, const void *code_buff)
{
    trace::ScopedTrace scoped_trace("Allocate Code");
    void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
    if (UNLIKELY(code_ptr == nullptr || memcpy_s(code_ptr, size, code_buff, size) != EOK)) {
        return nullptr;
    }
    ProtectCode(os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size));
    memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
    CodeRangeUpdate(code_ptr, size);
    return code_ptr;
}

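// Allocates a page-aligned code buffer that is still writable. The caller is
// expected to emit code into the returned range and then hand it to
// ProtectCode() once the buffer contents are final.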
os::mem::MapRange<std::byte> CodeAllocator::AllocateCodeUnprotected(size_t size)
{
    trace::ScopedTrace scoped_trace("Allocate Code Unprotected");
    void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
    if (UNLIKELY(code_ptr == nullptr)) {
        return os::mem::MapRange<std::byte>(nullptr, 0);
    }
    memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
    CodeRangeUpdate(code_ptr, size);
    return os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size);
}

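// Switches the mapping from writable to read-execute. Keeping this as the last
// step of code installation means the pages are never writable and executable
// at the same time (the usual W^X discipline).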
/* static */
void CodeAllocator::ProtectCode(os::mem::MapRange<std::byte> mem_range)
{
    mem_range.MakeReadExec();
}

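// Returns true if `pc` falls inside the overall span of code allocated so far,
// e.g. so a signal handler can decide whether a faulting pc belongs to
// generated code. The check is conservative: [codeRangeStart_, codeRangeEnd_]
// covers every allocation but may also include gaps between buffers.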
bool CodeAllocator::InAllocatedCodeRange(const void *pc)
{
    os::memory::ReadLockHolder rlock(code_range_lock_);
    return (pc >= codeRangeStart_) && (pc <= codeRangeEnd_);
}

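// Widens the cached range to cover the new buffer. Taken under the write lock
// so that readers in InAllocatedCodeRange observe a consistent pair of bounds.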
void CodeAllocator::CodeRangeUpdate(void *ptr, size_t size)
{
    os::memory::WriteLockHolder rwlock(code_range_lock_);
    // Test for nullptr first: a relational comparison against a null pointer is unspecified.
    if (codeRangeStart_ == nullptr || ptr < codeRangeStart_) {
        codeRangeStart_ = ptr;
    }
    void *buffer_end = ToVoidPtr(ToUintPtr(ptr) + size);
    if (codeRangeEnd_ == nullptr || buffer_end > codeRangeEnd_) {
        codeRangeEnd_ = buffer_end;
    }
}

}  // namespace panda