// Copyright 2021 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_WASM_STACKS_H_
#define V8_WASM_STACKS_H_

#if !V8_ENABLE_WEBASSEMBLY
#error This header should only be included if WebAssembly is enabled.
#endif  // !V8_ENABLE_WEBASSEMBLY

#include "src/base/build_config.h"
#include "src/common/globals.h"
#include "src/execution/isolate.h"
#include "src/utils/allocation.h"

namespace v8 {
namespace internal {
namespace wasm {

struct JumpBuffer {
  Address sp;
  Address fp;
  Address pc;
  void* stack_limit;
  // TODO(thibaudm/fgm): Add general-purpose registers.
};

constexpr int kJmpBufSpOffset = offsetof(JumpBuffer, sp);
constexpr int kJmpBufFpOffset = offsetof(JumpBuffer, fp);
constexpr int kJmpBufPcOffset = offsetof(JumpBuffer, pc);
constexpr int kJmpBufStackLimitOffset = offsetof(JumpBuffer, stack_limit);
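
// These constants expose the JumpBuffer field offsets by name so that code
// which cannot use offsetof directly (e.g. the hand-written stack-switching
// builtins) can load and store the fields at the right byte offsets.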

class StackMemory {
 public:
  static StackMemory* New(Isolate* isolate) {
    return new StackMemory(isolate);
  }

  // Returns a non-owning view of the current stack.
  static StackMemory* GetCurrentStackView(Isolate* isolate) {
    byte* limit =
        reinterpret_cast<byte*>(isolate->stack_guard()->real_jslimit());
    return new StackMemory(isolate, limit);
  }

  ~StackMemory() {
    if (FLAG_trace_wasm_stack_switching) {
      PrintF("Delete stack #%d\n", id_);
    }
    PageAllocator* allocator = GetPlatformPageAllocator();
    if (owned_) allocator->DecommitPages(limit_, size_);
    // We don't need to handle removing the last stack from the list (next_ ==
    // this). This only happens on isolate tear down, otherwise there is always
    // at least one reachable stack (the active stack).
    isolate_->wasm_stacks() = next_;
    prev_->next_ = next_;
    next_->prev_ = prev_;
  }

  void* jslimit() const { return limit_ + kJSLimitOffsetKB; }
  Address base() const { return reinterpret_cast<Address>(limit_ + size_); }
  JumpBuffer* jmpbuf() { return &jmpbuf_; }
  int id() { return id_; }

  // Insert a stack in the linked list after this stack.
  void Add(StackMemory* stack) {
    stack->next_ = this->next_;
    stack->prev_ = this;
    this->next_->prev_ = stack;
    this->next_ = stack;
  }

  StackMemory* next() { return next_; }

  // Track external memory usage for Managed<StackMemory> objects.
  size_t owned_size() { return sizeof(StackMemory) + (owned_ ? size_ : 0); }
  bool IsActive() {
    byte* sp = reinterpret_cast<byte*>(GetCurrentStackPosition());
    return limit_ < sp && sp <= limit_ + size_;
  }

 private:
#ifdef DEBUG
  static constexpr int kJSLimitOffsetKB = 80;
#else
  static constexpr int kJSLimitOffsetKB = 40;
#endif

  // This constructor allocates a new stack segment.
  explicit StackMemory(Isolate* isolate) : isolate_(isolate), owned_(true) {
    static std::atomic<int> next_id(1);
    id_ = next_id.fetch_add(1);
    PageAllocator* allocator = GetPlatformPageAllocator();
    int kJsStackSizeKB = 4;
    size_ = (kJsStackSizeKB + kJSLimitOffsetKB) * KB;
    size_ = RoundUp(size_, allocator->AllocatePageSize());
    limit_ = static_cast<byte*>(
        allocator->AllocatePages(nullptr, size_, allocator->AllocatePageSize(),
                                 PageAllocator::kReadWrite));
    if (FLAG_trace_wasm_stack_switching) {
      PrintF("Allocate stack #%d\n", id_);
    }
  }

  // Overload to represent a view of the libc stack.
  StackMemory(Isolate* isolate, byte* limit)
      : isolate_(isolate),
        limit_(limit),
        size_(reinterpret_cast<size_t>(limit)),
        owned_(false) {
    id_ = 0;
  }

  Isolate* isolate_;
  byte* limit_;
  size_t size_;
  bool owned_;
  JumpBuffer jmpbuf_;
  int id_;
  // Stacks form a circular doubly linked list per isolate.
  StackMemory* next_ = this;
  StackMemory* prev_ = this;
};

}  // namespace wasm
}  // namespace internal
}  // namespace v8

#endif  // V8_WASM_STACKS_H_
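
// A minimal usage sketch (illustrative only, not part of the V8 sources),
// assuming an Isolate* is at hand: allocate a secondary stack, link it after
// the currently active stack, and seed its jump buffer before switching.
//
//   StackMemory* active = isolate->wasm_stacks();  // Head of the circular list.
//   StackMemory* stack = StackMemory::New(isolate);
//   active->Add(stack);                            // Insert after the active stack.
//   stack->jmpbuf()->sp = stack->base();           // Stacks grow downwards.
//   stack->jmpbuf()->stack_limit = stack->jslimit();
//   // fp and pc would be filled in by the code performing the actual switch.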