• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2021 Huawei Device Co., Ltd.
3  * Licensed under the Apache License, Version 2.0 (the "License");
4  * you may not use this file except in compliance with the License.
5  * You may obtain a copy of the License at
6  *
7  * http://www.apache.org/licenses/LICENSE-2.0
8  *
9  * Unless required by applicable law or agreed to in writing, software
10  * distributed under the License is distributed on an "AS IS" BASIS,
11  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12  * See the License for the specific language governing permissions and
13  * limitations under the License.
14  */
15 
16 #include "code_allocator.h"
17 #include "os/mem.h"
18 #include "trace/trace.h"
19 #include "mem/base_mem_stats.h"
20 #include "trace/trace.h"
21 
22 #include <securec.h>
23 #include <cstring>
24 
25 namespace panda {
26 
// Log2 alignment of one OS page: code is handed out page-aligned so that
// whole pages can later be remapped read+exec by ProtectCode().
const Alignment CodeAllocator::PAGE_LOG_ALIGN = GetLogAlignment(os::mem::GetPageSize());
28 
/// Builds the allocator: an internal arena for SPACE_TYPE_CODE plus an
/// (initially empty) cached [codeRangeStart_, codeRangeEnd_] address range.
/// @param mem_stats  stats sink used to record code allocations.
CodeAllocator::CodeAllocator(BaseMemStats *mem_stats)
    : arenaAllocator_([&]() {
          // Immediately-invoked lambda so the arena construction itself
          // is covered by a scoped trace event.
          trace::ScopedTrace scoped_trace(__PRETTY_FUNCTION__);
          // Do not set up mem_stats in internal arena allocator, because we will manage memstats here.
          return ArenaAllocator(SpaceType::SPACE_TYPE_CODE, nullptr);
      }()),
      memStats_(mem_stats),
      codeRangeStart_(nullptr),
      codeRangeEnd_(nullptr)
{
    // Page alignment must be expressible by the Alignment enum used by Alloc().
    ASSERT(PAGE_LOG_ALIGN >= LOG_ALIGN_MIN);
    ASSERT(PAGE_LOG_ALIGN <= LOG_ALIGN_MAX);
}
42 
~CodeAllocator()43 CodeAllocator::~CodeAllocator()
44 {
45     codeRangeStart_ = nullptr;
46     codeRangeEnd_ = nullptr;
47 }
48 
AllocateCode(size_t size,const void * code_buff)49 void *CodeAllocator::AllocateCode(size_t size, const void *code_buff)
50 {
51     trace::ScopedTrace scoped_trace("Allocate Code");
52     void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
53     if (UNLIKELY(code_ptr == nullptr || memcpy_s(code_ptr, size, code_buff, size) != EOK)) {
54         return nullptr;
55     }
56     ProtectCode(os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size));
57     memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
58     CodeRangeUpdate(code_ptr, size);
59     return code_ptr;
60 }
61 
AllocateCodeUnprotected(size_t size)62 os::mem::MapRange<std::byte> CodeAllocator::AllocateCodeUnprotected(size_t size)
63 {
64     trace::ScopedTrace scoped_trace("Allocate Code");
65     void *code_ptr = arenaAllocator_.Alloc(size, PAGE_LOG_ALIGN);
66     if (UNLIKELY(code_ptr == nullptr)) {
67         return os::mem::MapRange<std::byte>(nullptr, 0);
68     }
69     memStats_->RecordAllocateRaw(size, SpaceType::SPACE_TYPE_CODE);
70     CodeRangeUpdate(code_ptr, size);
71     return os::mem::MapRange<std::byte>(static_cast<std::byte *>(code_ptr), size);
72 }
73 
/* static */
/// Remaps the given range read+exec so the code can run but no longer be
/// written.
void CodeAllocator::ProtectCode(os::mem::MapRange<std::byte> mem_range)
{
    mem_range.MakeReadExec();
}
79 
/// Checks whether `pc` falls inside the overall range of code allocated so
/// far. Takes the range lock shared, so it may race safely with
/// CodeRangeUpdate() writers.
bool CodeAllocator::InAllocatedCodeRange(const void *pc)
{
    os::memory::ReadLockHolder rlock(code_range_lock_);
    // NOTE(review): the upper bound is inclusive — codeRangeEnd_ is one past
    // the last allocated byte (see CodeRangeUpdate), so pc == codeRangeEnd_
    // is reported as in-range. Confirm this is intended (e.g. for return
    // addresses pointing just past a call at the end of the region).
    return (pc >= codeRangeStart_) && (pc <= codeRangeEnd_);
}
85 
CodeRangeUpdate(void * ptr,size_t size)86 void CodeAllocator::CodeRangeUpdate(void *ptr, size_t size)
87 {
88     os::memory::WriteLockHolder rwlock(code_range_lock_);
89     if (ptr < codeRangeStart_ || codeRangeStart_ == nullptr) {
90         codeRangeStart_ = ptr;
91     }
92     void *buffer_end = ToVoidPtr(ToUintPtr(ptr) + size);
93     if (buffer_end > codeRangeEnd_ || codeRangeEnd_ == nullptr) {
94         codeRangeEnd_ = buffer_end;
95     }
96 }
97 
98 }  // namespace panda
99