/*
 *
 * Copyright 2016 gRPC authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#ifndef GRPC_CORE_LIB_SLICE_SLICE_INTERNAL_H
#define GRPC_CORE_LIB_SLICE_SLICE_INTERNAL_H

#include <grpc/support/port_platform.h>

#include <grpc/support/log.h>

#include <grpc/slice.h>
#include <grpc/slice_buffer.h>
#include <string.h>

#include <string>

#include "src/core/lib/gpr/murmur_hash.h"
#include "src/core/lib/gprpp/memory.h"
#include "src/core/lib/gprpp/ref_counted.h"
#include "src/core/lib/slice/slice_utils.h"
#include "src/core/lib/transport/static_metadata.h"

// Interned slices have specific fast-path operations for hashing. To inline
// these operations, we need to forward declare the static-metadata hash
// values here.
extern uint32_t grpc_static_metadata_hash_values[GRPC_STATIC_MDSTR_COUNT];

// grpc_slice_refcount : A reference count for grpc_slice.
//
// Non-inlined grpc_slice objects are refcounted. Historically this was
// implemented via grpc_slice_refcount, a C-style polymorphic class using a
// manually managed vtable of operations. Subclasses would define their own
// vtable; the 'virtual' methods (ref, unref, equals and hash) would simply
// call the function pointers in the vtable as necessary.
//
// Unfortunately, this leads to some inefficiencies in the generated code that
// can be improved upon. For example, equality checking for interned slices is
// a simple equality check on the refcount pointer. With the vtable approach,
// this would translate to roughly the following (high-level) instructions:
//
// grpc_slice_equals(slice1, slice2):
//   load vtable->eq -> eq_func
//   call eq_func(slice1, slice2)
//
// interned_slice_equals(slice1, slice2):
//   load slice1.ref -> r1
//   load slice2.ref -> r2
//   cmp r1, r2 -> retval
//   ret retval
//
// This leads to a function call for a function defined in another translation
// unit, which imposes memory barriers and reduces the compiler's ability to
// optimize (in addition to the added overhead of call/ret). Additionally, it
// may be harder to reason about branch prediction when we're jumping to
// essentially arbitrarily provided function pointers.
//
// In addition, it is arguable that while virtualization was helpful for the
// Equals()/Hash() methods, it was fundamentally unnecessary for
// Ref()/Unref().
//
// Instead, grpc_slice_refcount provides the same functionality as the C-style
// virtual class, but in a de-virtualized manner - Eq(), Hash(), Ref() and
// Unref() are provided within this header file. Fastpaths for Eq()/Hash()
// (interned and static metadata slices), as well as the Ref() operation, can
// all be inlined without any memory barriers.
//
// It does this by:
// 1. Using grpc_core::RefCount<> (header-only) for Ref/Unref. Two special
//    cases need support: no-op ref/unref (e.g. static metadata slices) and
//    stream slice references (where all the slices share the stream ref).
//    This is in addition to the normal case of '1 slice, 1 ref'.
//    To support these cases, we explicitly track a nullable pointer to the
//    underlying RefCount<>. No-op ref/unref is implemented by checking the
//    pointer for null and doing nothing if it is. Both stream slice refs and
//    'normal' slices use the same path for Ref/Unref (by targeting the
//    non-null pointer).
//
// 2. Introducing the notion of grpc_slice_refcount::Type. This describes
//    whether a slice ref is used by a static metadata slice, an interned
//    slice, or other slices. We switch on the slice ref type in order to
//    provide fastpaths for Equals() and Hash(); see the sketch below.
//
// In total, this saves us roughly 1-2% latency for unary calls, with smaller
// calls benefitting the most. The effect is present, but not as useful, for
// larger calls where the cost of sending the data dominates.
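//
// As an illustrative sketch (not a guarantee about generated code), the
// devirtualized design means that, after inlining, comparing two interned
// slices reduces to a pointer comparison:
//
//   // Hypothetical caller:
//   bool SameInterned(const grpc_slice& a, const grpc_slice& b) {
//     // Inlines to roughly 'a.refcount == b.refcount' when the refcount's
//     // Type is INTERNED (see Eq() below); no indirect call is emitted.
//     return a.refcount->Eq(a, b) != 0;
//   }
//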
// TODO(arjunroy): Investigate if this can be removed with strongly typed
// grpc_slices.
struct grpc_slice_refcount {
 public:
  enum class Type {
    STATIC,    // Refcount for a static metadata slice.
    INTERNED,  // Refcount for an interned slice.
    NOP,       // No-op: Ref()/Unref() do nothing.
    REGULAR    // Refcount for non-static-metadata, non-interned slices.
  };
  typedef void (*DestroyerFn)(void*);

  grpc_slice_refcount() = default;

  explicit grpc_slice_refcount(Type t) : ref_type_(t) {}

  explicit grpc_slice_refcount(grpc_slice_refcount* sub)
      : sub_refcount_(sub) {}
  // Regular constructor for grpc_slice_refcount.
  //
  // Parameters:
  //  1. grpc_slice_refcount::Type type
  //  Whether we are the refcount for a static metadata slice, an interned
  //  slice, or any other kind of slice.
  //
  //  2. RefCount* ref
  //  The pointer to the actual underlying grpc_core::RefCount. Rather than
  //  performing struct offset computations, as in the original implementation,
  //  to reach the refcount (which required a virtual method), we devirtualize
  //  by using a nullable pointer so that a single pair of Ref/Unref methods
  //  suffices.
  //
  //  3. DestroyerFn destroyer_fn
  //  Called when the refcount goes to 0, with destroyer_arg as parameter.
  //
  //  4. void* destroyer_arg
  //  Argument for the virtualized destructor.
  //
  //  5. grpc_slice_refcount* sub
  //  Argument used for interned slices; returned by sub_refcount().
  grpc_slice_refcount(grpc_slice_refcount::Type type, grpc_core::RefCount* ref,
                      DestroyerFn destroyer_fn, void* destroyer_arg,
                      grpc_slice_refcount* sub)
      : ref_(ref),
        ref_type_(type),
        sub_refcount_(sub),
        dest_fn_(destroyer_fn),
        destroy_fn_arg_(destroyer_arg) {}

  // Initializer for static refcounts.
  grpc_slice_refcount(grpc_slice_refcount* sub, Type type)
      : ref_type_(type), sub_refcount_(sub) {}

  Type GetType() const { return ref_type_; }

  int Eq(const grpc_slice& a, const grpc_slice& b);

  uint32_t Hash(const grpc_slice& slice);

  void Ref() {
    if (ref_ == nullptr) return;
    ref_->RefNonZero();
  }
  void Unref() {
    if (ref_ == nullptr) return;
    if (ref_->Unref()) {
      dest_fn_(destroy_fn_arg_);
    }
  }

  grpc_slice_refcount* sub_refcount() const { return sub_refcount_; }

 private:
  grpc_core::RefCount* ref_ = nullptr;
  const Type ref_type_ = Type::REGULAR;
  grpc_slice_refcount* sub_refcount_ = this;
  DestroyerFn dest_fn_ = nullptr;
  void* destroy_fn_arg_ = nullptr;
};
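
// Illustrative sketch (hypothetical type, not part of this header): a
// heap-allocated slice implementation could wire up a REGULAR refcount with
// the 5-argument constructor above. When the last reference is dropped,
// Unref() calls the destroyer function with the supplied argument:
//
//   struct MyBufferRefcount {
//     static void Destroy(void* arg) {
//       gpr_free(arg);  // frees this MyBufferRefcount (and any payload it
//                       // was allocated together with)
//     }
//     MyBufferRefcount()
//         : base(grpc_slice_refcount::Type::REGULAR, &refs, Destroy, this,
//                &base) {}
//     grpc_slice_refcount base;  // stored in grpc_slice::refcount
//     grpc_core::RefCount refs;  // the shared ownership count
//   };
//
// For non-interned slices the 'sub' argument points back at 'base', so
// sub-slices share the same refcount.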

namespace grpc_core {

struct StaticSliceRefcount {
  static grpc_slice_refcount kStaticSubRefcount;

  explicit StaticSliceRefcount(uint32_t index)
      : base(&kStaticSubRefcount, grpc_slice_refcount::Type::STATIC),
        index(index) {}

  grpc_slice_refcount base;
  const uint32_t index;
};

extern grpc_slice_refcount kNoopRefcount;

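// Refcounts for an interned slice. Two grpc_slice_refcount objects share a
// single underlying RefCount ('refcnt'): 'base' (Type::INTERNED) is used by
// the interned slice itself and enables the pointer-comparison Eq() and
// precomputed-hash Hash() fast paths, while 'sub' (Type::REGULAR) is what
// sub_refcount() hands out, so sub-slices (whose contents differ from the
// parent) are not treated as interned. When the shared count drops to zero,
// Destroy() runs the destructor and frees the allocation.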
struct InternedSliceRefcount {
  static void Destroy(void* arg) {
    auto* rc = static_cast<InternedSliceRefcount*>(arg);
    rc->~InternedSliceRefcount();
    gpr_free(rc);
  }

  InternedSliceRefcount(size_t length, uint32_t hash,
                        InternedSliceRefcount* bucket_next)
      : base(grpc_slice_refcount::Type::INTERNED, &refcnt, Destroy, this,
             &sub),
        sub(grpc_slice_refcount::Type::REGULAR, &refcnt, Destroy, this, &sub),
        length(length),
        hash(hash),
        bucket_next(bucket_next) {}

  ~InternedSliceRefcount();

  grpc_slice_refcount base;
  grpc_slice_refcount sub;
  const size_t length;
  RefCount refcnt;
  const uint32_t hash;
  InternedSliceRefcount* bucket_next;
};

}  // namespace grpc_core

inline size_t grpc_refcounted_slice_length(const grpc_slice& slice) {
  GPR_DEBUG_ASSERT(slice.refcount != nullptr);
  return slice.data.refcounted.length;
}

inline const uint8_t* grpc_refcounted_slice_data(const grpc_slice& slice) {
  GPR_DEBUG_ASSERT(slice.refcount != nullptr);
  return slice.data.refcounted.bytes;
}

inline int grpc_slice_refcount::Eq(const grpc_slice& a, const grpc_slice& b) {
  GPR_DEBUG_ASSERT(a.refcount != nullptr);
  GPR_DEBUG_ASSERT(a.refcount == this);
  switch (ref_type_) {
    case Type::STATIC:
      GPR_DEBUG_ASSERT(
          (GRPC_STATIC_METADATA_INDEX(a) == GRPC_STATIC_METADATA_INDEX(b)) ==
          (a.refcount == b.refcount));
      // Fall through: static slices compare by refcount pointer, just like
      // interned slices.
    case Type::INTERNED:
      return a.refcount == b.refcount;
    case Type::NOP:
    case Type::REGULAR:
      break;
  }
  if (grpc_refcounted_slice_length(a) != GRPC_SLICE_LENGTH(b)) return false;
  if (grpc_refcounted_slice_length(a) == 0) return true;
  return 0 == memcmp(grpc_refcounted_slice_data(a), GRPC_SLICE_START_PTR(b),
                     grpc_refcounted_slice_length(a));
}

inline uint32_t grpc_slice_refcount::Hash(const grpc_slice& slice) {
  GPR_DEBUG_ASSERT(slice.refcount != nullptr);
  GPR_DEBUG_ASSERT(slice.refcount == this);
  switch (ref_type_) {
    case Type::STATIC:
      return ::grpc_static_metadata_hash_values[GRPC_STATIC_METADATA_INDEX(
          slice)];
    case Type::INTERNED:
      return reinterpret_cast<grpc_core::InternedSliceRefcount*>(slice.refcount)
          ->hash;
    case Type::NOP:
    case Type::REGULAR:
      break;
  }
  return gpr_murmur_hash3(grpc_refcounted_slice_data(slice),
                          grpc_refcounted_slice_length(slice),
                          grpc_core::g_hash_seed);
}

inline const grpc_slice& grpc_slice_ref_internal(const grpc_slice& slice) {
  if (slice.refcount) {
    slice.refcount->Ref();
  }
  return slice;
}

inline void grpc_slice_unref_internal(const grpc_slice& slice) {
  if (slice.refcount) {
    slice.refcount->Unref();
  }
}
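
// Illustrative usage (hypothetical): take and later drop an extra reference
// so a slice outlives the caller's scope. For inlined slices (refcount ==
// nullptr) both calls are no-ops:
//
//   grpc_slice retained = grpc_slice_ref_internal(s);
//   // ... hand 'retained' to asynchronous code ...
//   grpc_slice_unref_internal(retained);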

void grpc_slice_buffer_reset_and_unref_internal(grpc_slice_buffer* sb);
void grpc_slice_buffer_partial_unref_internal(grpc_slice_buffer* sb,
                                              size_t idx);
void grpc_slice_buffer_destroy_internal(grpc_slice_buffer* sb);

// Returns a pointer to the first slice in the slice buffer without
// transferring ownership of, or taking a reference on, that slice.
inline grpc_slice* grpc_slice_buffer_peek_first(grpc_slice_buffer* sb) {
  GPR_DEBUG_ASSERT(sb->count > 0);
  return &sb->slices[0];
}

// Removes the first slice from the slice buffer.
void grpc_slice_buffer_remove_first(grpc_slice_buffer* sb);

// Calls grpc_slice_sub with the given parameters on the first slice.
void grpc_slice_buffer_sub_first(grpc_slice_buffer* sb, size_t begin,
                                 size_t end);

/* Check if a slice is interned */
bool grpc_slice_is_interned(const grpc_slice& slice);
inline bool grpc_slice_is_interned(const grpc_slice& slice) {
  return (slice.refcount &&
          (slice.refcount->GetType() == grpc_slice_refcount::Type::INTERNED ||
           slice.refcount->GetType() == grpc_slice_refcount::Type::STATIC));
}

inline bool grpc_slice_static_interned_equal(const grpc_slice& a,
                                             const grpc_slice& b) {
  GPR_DEBUG_ASSERT(grpc_slice_is_interned(a) && grpc_slice_is_interned(b));
  return a.refcount == b.refcount;
}

void grpc_slice_intern_init(void);
void grpc_slice_intern_shutdown(void);
void grpc_test_only_set_slice_hash_seed(uint32_t seed);
// If the slice matches a static slice, returns the static slice; otherwise
// returns the passed-in slice (without reffing it). Used at surface
// boundaries where we might receive an un-interned static string.
grpc_slice grpc_slice_maybe_static_intern(grpc_slice slice,
                                          bool* returned_slice_is_different);
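
// Illustrative usage of grpc_slice_maybe_static_intern() (hypothetical
// caller; variable names are ours):
//
//   bool returned_different = false;
//   grpc_slice maybe_static =
//       grpc_slice_maybe_static_intern(input, &returned_different);
//   // 'returned_different' is set to true when the returned slice is not
//   // 'input', i.e. a matching static slice was found and returned instead.
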
uint32_t grpc_static_slice_hash(grpc_slice s);
int grpc_static_slice_eq(grpc_slice a, grpc_slice b);

inline uint32_t grpc_slice_hash_refcounted(const grpc_slice& s) {
  GPR_DEBUG_ASSERT(s.refcount != nullptr);
  return s.refcount->Hash(s);
}

inline uint32_t grpc_slice_default_hash_internal(const grpc_slice& s) {
  return gpr_murmur_hash3(GRPC_SLICE_START_PTR(s), GRPC_SLICE_LENGTH(s),
                          grpc_core::g_hash_seed);
}

inline uint32_t grpc_slice_hash_internal(const grpc_slice& s) {
  return s.refcount == nullptr ? grpc_slice_default_hash_internal(s)
                               : grpc_slice_hash_refcounted(s);
}

grpc_slice grpc_slice_from_moved_buffer(grpc_core::UniquePtr<char> p,
                                        size_t len);
grpc_slice grpc_slice_from_moved_string(grpc_core::UniquePtr<char> p);
grpc_slice grpc_slice_from_cpp_string(std::string str);

// Returns the memory used by this slice, not counting the slice structure
// itself. This means that inlined slices and slices from static strings will
// return 0. All other slices will return the size of the allocated chars.
size_t grpc_slice_memory_usage(grpc_slice s);

grpc_core::UnmanagedMemorySlice grpc_slice_sub_no_ref(
    const grpc_core::UnmanagedMemorySlice& source, size_t begin, size_t end);

namespace grpc_core {

struct SliceHash {
  std::size_t operator()(const grpc_slice& slice) const {
    return grpc_slice_hash_internal(slice);
  }
};

}  // namespace grpc_core

inline bool operator==(const grpc_slice& s1, const grpc_slice& s2) {
  return grpc_slice_eq(s1, s2);
}
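
// Illustrative usage (not part of this header's API): SliceHash together with
// the operator== above lets grpc_slice be used as a key in standard unordered
// containers, e.g.:
//
//   std::unordered_map<grpc_slice, int, grpc_core::SliceHash> counts;
//   counts[some_slice]++;  // hashes via grpc_slice_hash_internal(),
//                          // compares via grpc_slice_eq()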

#endif /* GRPC_CORE_LIB_SLICE_SLICE_INTERNAL_H */