// Copyright 2024 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#ifndef GRPC_SRC_CORE_LIB_TRANSPORT_CALL_ARENA_ALLOCATOR_H
#define GRPC_SRC_CORE_LIB_TRANSPORT_CALL_ARENA_ALLOCATOR_H

#include <grpc/support/port_platform.h>
#include <stddef.h>

#include <algorithm>  // for std::min
#include <atomic>
#include <cstddef>
#include <utility>  // for std::move

#include "src/core/lib/resource_quota/arena.h"
#include "src/core/lib/resource_quota/memory_quota.h"
#include "src/core/util/ref_counted.h"

namespace grpc_core {

class CallSizeEstimator final {
 public:
  explicit CallSizeEstimator(size_t initial_estimate)
      : call_size_estimate_(initial_estimate) {}

  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION size_t CallSizeEstimate() {
    // We round up our current estimate to the NEXT multiple of kRoundUpSize.
    // This ensures:
    //  1. a consistent size allocation when our estimate is drifting slowly
    //     (which is common) - which tends to help most allocators reuse memory
    //  2. a small amount of allowed growth over the estimate without hitting
    //     the arena size doubling case, reducing overall memory usage
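    // Example: with a current estimate of 1000 bytes, this returns
    // (1000 + 2 * 256) & ~255 == 1280 - a multiple of kRoundUpSize sitting
    // between kRoundUpSize and 2 * kRoundUpSize bytes above the estimate.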
    static constexpr size_t kRoundUpSize = 256;
    return (call_size_estimate_.load(std::memory_order_relaxed) +
            2 * kRoundUpSize) &
           ~(kRoundUpSize - 1);
  }

  GPR_ATTRIBUTE_ALWAYS_INLINE_FUNCTION void UpdateCallSizeEstimate(
      size_t size) {
    size_t cur = call_size_estimate_.load(std::memory_order_relaxed);
    if (cur < size) {
      // size grew: update estimate
      call_size_estimate_.compare_exchange_weak(
          cur, size, std::memory_order_relaxed, std::memory_order_relaxed);
      // if we lose: never mind, something else will likely update soon enough
    } else if (cur == size) {
      // no change: holding pattern
    } else if (cur > 0) {
      // size shrank: decrease estimate
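      // The replacement value is an exponential moving average that weights
      // the observed size by roughly 1/256; taking the min with cur - 1
      // ensures the estimate shrinks by at least one byte per successful
      // update.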
      call_size_estimate_.compare_exchange_weak(
          cur, std::min(cur - 1, (255 * cur + size) / 256),
          std::memory_order_relaxed, std::memory_order_relaxed);
      // if we lose: never mind, something else will likely update soon enough
    }
  }

 private:
  std::atomic<size_t> call_size_estimate_;
};

class CallArenaAllocator final : public ArenaFactory {
 public:
  CallArenaAllocator(MemoryAllocator allocator, size_t initial_size)
      : ArenaFactory(std::move(allocator)),
        call_size_estimator_(initial_size) {}

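  // Makes a new arena sized from the current call size estimate; Ref() passes
  // a reference to this factory along with it, keeping the factory alive for
  // the arena's lifetime.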
  RefCountedPtr<Arena> MakeArena() override {
    return Arena::Create(call_size_estimator_.CallSizeEstimate(), Ref());
  }

  void FinalizeArena(Arena* arena) override;

  size_t CallSizeEstimate() { return call_size_estimator_.CallSizeEstimate(); }

 private:
  CallSizeEstimator call_size_estimator_;
};
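
// Illustrative usage sketch (not part of the original header; the quota and
// allocator names below are assumptions based on gRPC core's resource-quota
// API):
//
//   auto factory = MakeRefCounted<CallArenaAllocator>(
//       ResourceQuota::Default()->memory_quota()->CreateMemoryAllocator(
//           "call-arena"),
//       /*initial_size=*/1024);
//   RefCountedPtr<Arena> arena = factory->MakeArena();
//   // ... use the arena for a call; FinalizeArena (defined in the .cc file)
//   // is expected to feed the arena's final size back into the estimator so
//   // future arenas start closer to the right size.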

}  // namespace grpc_core

#endif  // GRPC_SRC_CORE_LIB_TRANSPORT_CALL_ARENA_ALLOCATOR_H