1 // Copyright Joyent, Inc. and other Node contributors.
2 //
3 // Permission is hereby granted, free of charge, to any person obtaining a
4 // copy of this software and associated documentation files (the
5 // "Software"), to deal in the Software without restriction, including
6 // without limitation the rights to use, copy, modify, merge, publish,
7 // distribute, sublicense, and/or sell copies of the Software, and to permit
8 // persons to whom the Software is furnished to do so, subject to the
9 // following conditions:
10 //
11 // The above copyright notice and this permission notice shall be included
12 // in all copies or substantial portions of the Software.
13 //
14 // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
15 // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
17 // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
18 // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
19 // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
20 // USE OR OTHER DEALINGS IN THE SOFTWARE.
21
22 #ifndef SRC_UTIL_INL_H_
23 #define SRC_UTIL_INL_H_
24
25 #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS
26
27 #include <cmath>
28 #include <cstring>
29 #include <locale>
30 #include "util.h"
31
32 // These are defined by <sys/byteorder.h> or <netinet/in.h> on some systems.
33 // To avoid warnings, undefine them before redefining them.
34 #ifdef BSWAP_2
35 # undef BSWAP_2
36 #endif
37 #ifdef BSWAP_4
38 # undef BSWAP_4
39 #endif
40 #ifdef BSWAP_8
41 # undef BSWAP_8
42 #endif
43
44 #if defined(_MSC_VER)
45 #include <intrin.h>
46 #define BSWAP_2(x) _byteswap_ushort(x)
47 #define BSWAP_4(x) _byteswap_ulong(x)
48 #define BSWAP_8(x) _byteswap_uint64(x)
49 #else
// Generic byte-swap fallbacks for compilers without intrinsics. Each
// expansion is wrapped in an outer set of parentheses so the macros
// compose safely with surrounding operators at the call site
// (e.g. `BSWAP_2(x) & mask` binds as intended).
// Note: BSWAP_2 relies on the caller assigning the result back into a
// 16-bit lvalue to truncate the promoted high bits.
#define BSWAP_2(x) (((x) << 8) | ((x) >> 8))
#define BSWAP_4(x)                                                            \
  ((((x) & 0xFF) << 24) |                                                     \
   (((x) & 0xFF00) << 8) |                                                    \
   (((x) >> 8) & 0xFF00) |                                                    \
   (((x) >> 24) & 0xFF))
#define BSWAP_8(x)                                                            \
  ((((x) & 0xFF00000000000000ull) >> 56) |                                    \
   (((x) & 0x00FF000000000000ull) >> 40) |                                    \
   (((x) & 0x0000FF0000000000ull) >> 24) |                                    \
   (((x) & 0x000000FF00000000ull) >> 8) |                                     \
   (((x) & 0x00000000FF000000ull) << 8) |                                     \
   (((x) & 0x0000000000FF0000ull) << 24) |                                    \
   (((x) & 0x000000000000FF00ull) << 40) |                                    \
   (((x) & 0x00000000000000FFull) << 56))
65 #endif
66
// Declares a templated character-classification predicate `name` that
// evaluates `expr` on its argument `ch`. The static_assert guarantees the
// instantiating character type is at least `bits` wide so `expr` cannot
// silently truncate.
#define CHAR_TEST(bits, name, expr)                                           \
  template <typename T>                                                       \
  bool name(const T ch) {                                                     \
    static_assert(sizeof(ch) >= (bits) / 8,                                   \
                  "Character must be wider than " #bits " bits");             \
    return (expr);                                                            \
  }
74
75 namespace node {
76
// A freshly constructed node is circularly linked to itself; this doubles
// as the "detached / not in any list" state.
template <typename T>
ListNode<T>::ListNode() : prev_(this), next_(this) {}
79
// Unlinks the node from whatever list it is in before destruction, so a
// node can be destroyed without first being removed explicitly.
template <typename T>
ListNode<T>::~ListNode() {
  Remove();
}
84
// Splices this node out of its neighbors and re-links it to itself,
// returning it to the detached state. Safe to call on an already
// detached node (it is a no-op in that case).
template <typename T>
void ListNode<T>::Remove() {
  prev_->next_ = next_;
  next_->prev_ = prev_;
  prev_ = this;
  next_ = this;
}
92
// A node is "empty" (not part of a list) exactly when it links back to
// itself, which is the state established by the constructor and Remove().
template <typename T>
bool ListNode<T>::IsEmpty() const {
  return prev_ == this;
}
97
// Wraps a raw list node; the iterator walks the intrusive links directly.
template <typename T, ListNode<T> (T::*M)>
ListHead<T, M>::Iterator::Iterator(ListNode<T>* node) : node_(node) {}
100
// Dereferencing recovers the owning T from the embedded ListNode member M.
template <typename T, ListNode<T> (T::*M)>
T* ListHead<T, M>::Iterator::operator*() const {
  return ContainerOf(M, node_);
}
105
// Pre-increment: advance to the next node in the circular list. Reaching
// the sentinel head again is what makes this equal to end().
template <typename T, ListNode<T> (T::*M)>
const typename ListHead<T, M>::Iterator&
ListHead<T, M>::Iterator::operator++() {
  node_ = node_->next_;
  return *this;
}
112
// Iterators differ exactly when they point at different nodes.
template <typename T, ListNode<T> (T::*M)>
bool ListHead<T, M>::Iterator::operator!=(const Iterator& that) const {
  return node_ != that.node_;
}
117
// Detaches every remaining element. Elements themselves are NOT destroyed
// or freed here — they are only unlinked; ownership stays with whoever
// created them.
template <typename T, ListNode<T> (T::*M)>
ListHead<T, M>::~ListHead() {
  while (IsEmpty() == false)
    head_.next_->Remove();
}
123
// Appends `element` by splicing its embedded node M just before the
// sentinel head_, i.e. at the tail of the circular list.
template <typename T, ListNode<T> (T::*M)>
void ListHead<T, M>::PushBack(T* element) {
  ListNode<T>* that = &(element->*M);
  head_.prev_->next_ = that;
  that->prev_ = head_.prev_;
  that->next_ = &head_;
  head_.prev_ = that;
}
132
// Prepends `element` by splicing its embedded node M just after the
// sentinel head_, i.e. at the front of the circular list.
template <typename T, ListNode<T> (T::*M)>
void ListHead<T, M>::PushFront(T* element) {
  ListNode<T>* that = &(element->*M);
  head_.next_->prev_ = that;
  that->prev_ = &head_;
  that->next_ = head_.next_;
  head_.next_ = that;
}
141
// The list is empty when the sentinel head links only to itself.
template <typename T, ListNode<T> (T::*M)>
bool ListHead<T, M>::IsEmpty() const {
  return head_.IsEmpty();
}
146
// Unlinks and returns the first element, or nullptr if the list is empty.
// The element is only detached, not destroyed.
template <typename T, ListNode<T> (T::*M)>
T* ListHead<T, M>::PopFront() {
  if (IsEmpty())
    return nullptr;
  ListNode<T>* node = head_.next_;
  node->Remove();
  return ContainerOf(M, node);
}
155
// Iteration starts at the first real node (the one after the sentinel).
template <typename T, ListNode<T> (T::*M)>
typename ListHead<T, M>::Iterator ListHead<T, M>::begin() const {
  return Iterator(head_.next_);
}
160
// The end iterator points at the sentinel itself. The const_cast is needed
// because this is a const member function but Iterator stores a non-const
// node pointer.
template <typename T, ListNode<T> (T::*M)>
typename ListHead<T, M>::Iterator ListHead<T, M>::end() const {
  return Iterator(const_cast<ListNode<T>*>(&head_));
}
165
// Computes the byte offset of `field` within Outer via the null-pointer
// member-dereference idiom (offsetof() cannot be used with
// pointers-to-member). Formally undefined behavior, but relied upon here
// and accepted by the supported compilers.
template <typename Inner, typename Outer>
constexpr uintptr_t OffsetOf(Inner Outer::*field) {
  return reinterpret_cast<uintptr_t>(&(static_cast<Outer*>(nullptr)->*field));
}
170
// Recovers the address of the enclosing Outer object by subtracting the
// member's offset from the member's address.
template <typename Inner, typename Outer>
ContainerOfHelper<Inner, Outer>::ContainerOfHelper(Inner Outer::*field,
                                                   Inner* pointer)
    : pointer_(
          reinterpret_cast<Outer*>(
              reinterpret_cast<uintptr_t>(pointer) - OffsetOf(field))) {}
177
// Implicit conversion to any pointer type that Outer* statically converts
// to, so the result of ContainerOf() can be assigned to e.g. a
// base-class pointer without an explicit cast.
template <typename Inner, typename Outer>
template <typename TypeName>
ContainerOfHelper<Inner, Outer>::operator TypeName*() const {
  return static_cast<TypeName*>(pointer_);
}
183
// Calculates the address of the enclosing Outer object from the address
// of one of its members — the moral equivalent of container_of() in C.
template <typename Inner, typename Outer>
constexpr ContainerOfHelper<Inner, Outer> ContainerOf(Inner Outer::*field,
                                                      Inner* pointer) {
  return ContainerOfHelper<Inner, Outer>(field, pointer);
}
189
// Creates a v8 one-byte (Latin-1) string from `data`. length == -1 (the
// v8 convention) means "treat data as NUL-terminated". Aborts the process
// (ToLocalChecked) if string creation fails.
inline v8::Local<v8::String> OneByteString(v8::Isolate* isolate,
                                           const char* data,
                                           int length) {
  return v8::String::NewFromOneByte(isolate,
                                    reinterpret_cast<const uint8_t*>(data),
                                    v8::NewStringType::kNormal,
                                    length).ToLocalChecked();
}
198
// Overload for signed char buffers; identical to the const char* variant.
inline v8::Local<v8::String> OneByteString(v8::Isolate* isolate,
                                           const signed char* data,
                                           int length) {
  return v8::String::NewFromOneByte(isolate,
                                    reinterpret_cast<const uint8_t*>(data),
                                    v8::NewStringType::kNormal,
                                    length).ToLocalChecked();
}
207
// Overload for unsigned char buffers; no cast needed since v8 takes
// const uint8_t* directly.
inline v8::Local<v8::String> OneByteString(v8::Isolate* isolate,
                                           const unsigned char* data,
                                           int length) {
  return v8::String::NewFromOneByte(
             isolate, data, v8::NewStringType::kNormal, length)
      .ToLocalChecked();
}
215
// Reverses the byte order of every 16-bit word in `data`. nbytes must be
// a multiple of 2 (CHECK'd). The memcpy-based loop is safe for unaligned
// data and does not violate strict aliasing; compilers optimize it well.
void SwapBytes16(char* data, size_t nbytes) {
  CHECK_EQ(nbytes % 2, 0);

#if defined(_MSC_VER)
  // Fast path only when the buffer is suitably aligned for uint16_t.
  if (AlignUp(data, sizeof(uint16_t)) == data) {
    // MSVC has no strict aliasing, and is able to highly optimize this case.
    uint16_t* data16 = reinterpret_cast<uint16_t*>(data);
    size_t len16 = nbytes / sizeof(*data16);
    for (size_t i = 0; i < len16; i++) {
      data16[i] = BSWAP_2(data16[i]);
    }
    return;
  }
#endif

  uint16_t temp;
  for (size_t i = 0; i < nbytes; i += sizeof(temp)) {
    memcpy(&temp, &data[i], sizeof(temp));
    temp = BSWAP_2(temp);
    memcpy(&data[i], &temp, sizeof(temp));
  }
}
238
// Reverses the byte order of every 32-bit word in `data`. nbytes must be
// a multiple of 4 (CHECK'd). Same alignment/aliasing strategy as
// SwapBytes16.
void SwapBytes32(char* data, size_t nbytes) {
  CHECK_EQ(nbytes % 4, 0);

#if defined(_MSC_VER)
  // MSVC has no strict aliasing, and is able to highly optimize this case.
  if (AlignUp(data, sizeof(uint32_t)) == data) {
    uint32_t* data32 = reinterpret_cast<uint32_t*>(data);
    size_t len32 = nbytes / sizeof(*data32);
    for (size_t i = 0; i < len32; i++) {
      data32[i] = BSWAP_4(data32[i]);
    }
    return;
  }
#endif

  uint32_t temp;
  for (size_t i = 0; i < nbytes; i += sizeof(temp)) {
    memcpy(&temp, &data[i], sizeof(temp));
    temp = BSWAP_4(temp);
    memcpy(&data[i], &temp, sizeof(temp));
  }
}
261
// Reverses the byte order of every 64-bit word in `data`. nbytes must be
// a multiple of 8 (CHECK'd). Same alignment/aliasing strategy as
// SwapBytes16.
void SwapBytes64(char* data, size_t nbytes) {
  CHECK_EQ(nbytes % 8, 0);

#if defined(_MSC_VER)
  // Fast path only when the buffer is suitably aligned for uint64_t.
  if (AlignUp(data, sizeof(uint64_t)) == data) {
    // MSVC has no strict aliasing, and is able to highly optimize this case.
    uint64_t* data64 = reinterpret_cast<uint64_t*>(data);
    size_t len64 = nbytes / sizeof(*data64);
    for (size_t i = 0; i < len64; i++) {
      data64[i] = BSWAP_8(data64[i]);
    }
    return;
  }
#endif

  uint64_t temp;
  for (size_t i = 0; i < nbytes; i += sizeof(temp)) {
    memcpy(&temp, &data[i], sizeof(temp));
    temp = BSWAP_8(temp);
    memcpy(&data[i], &temp, sizeof(temp));
  }
}
284
// Lower-cases a single character using the classic "C" locale so the
// result does not depend on the process-wide locale.
char ToLower(char c) {
  const std::locale& classic = std::locale::classic();
  return std::tolower(c, classic);
}
288
// Returns a lower-cased copy of `in`, using the classic "C" locale for
// each character (locale-independent, same as the char overload).
std::string ToLower(const std::string& in) {
  std::string result;
  result.reserve(in.size());
  const std::locale& classic = std::locale::classic();
  for (const char ch : in)
    result.push_back(std::tolower(ch, classic));
  return result;
}
295
// Upper-cases a single character using the classic "C" locale so the
// result does not depend on the process-wide locale.
char ToUpper(char c) {
  const std::locale& classic = std::locale::classic();
  return std::toupper(c, classic);
}
299
// Returns an upper-cased copy of `in`, using the classic "C" locale for
// each character (locale-independent, same as the char overload).
std::string ToUpper(const std::string& in) {
  std::string result(in);
  const std::locale& classic = std::locale::classic();
  for (char& ch : result)
    ch = std::toupper(ch, classic);
  return result;
}
306
// Case-insensitive comparison of two NUL-terminated strings, using the
// classic "C" locale. Returns true only if they match over their full
// (equal) length.
bool StringEqualNoCase(const char* a, const char* b) {
  const std::locale& classic = std::locale::classic();
  for (;; ++a, ++b) {
    if (std::tolower(*a, classic) != std::tolower(*b, classic))
      return false;
    if (*a == '\0')
      return true;
  }
}
314
// Case-insensitive comparison of at most the first `length` characters,
// using the classic "C" locale. Comparison stops early (as equal) at a
// NUL terminator in `a`.
bool StringEqualNoCaseN(const char* a, const char* b, size_t length) {
  const std::locale& classic = std::locale::classic();
  for (const char* end = a + length; a < end; ++a, ++b) {
    if (std::tolower(*a, classic) != std::tolower(*b, classic))
      return false;
    if (*a == '\0')
      break;
  }
  return true;
}
324
// Multiplies a * b, aborting (via CHECK_EQ) if the multiplication
// overflowed. Overflow is detected by verifying that division undoes the
// multiplication; the a != 0 guard avoids dividing by zero.
template <typename T>
inline T MultiplyWithOverflowCheck(T a, T b) {
  auto ret = a * b;
  if (a != 0)
    CHECK_EQ(b, ret / a);

  return ret;
}
333
334 // These should be used in our code as opposed to the native
335 // versions as they abstract out some platform and or
336 // compiler version specific functionality.
337 // malloc(0) and realloc(ptr, 0) have implementation-defined behavior in
338 // that the standard allows them to either return a unique pointer or a
339 // nullptr for zero-sized allocation requests. Normalize by always using
340 // a nullptr.
// realloc() wrapper counting in elements of T rather than bytes. May
// return nullptr on failure; callers that want abort-on-failure use
// Realloc()/Malloc() below.
template <typename T>
T* UncheckedRealloc(T* pointer, size_t n) {
  // Aborts if sizeof(T) * n overflows size_t.
  size_t full_size = MultiplyWithOverflowCheck(sizeof(T), n);

  // Normalize the implementation-defined realloc(ptr, 0) behavior:
  // always free and return nullptr for zero-sized requests.
  if (full_size == 0) {
    free(pointer);
    return nullptr;
  }

  void* allocated = realloc(pointer, full_size);

  if (UNLIKELY(allocated == nullptr)) {
    // Tell V8 that memory is low and retry.
    LowMemoryNotification();
    allocated = realloc(pointer, full_size);
  }

  return static_cast<T*>(allocated);
}
360
// As per spec realloc behaves like malloc if passed nullptr.
// Allocates n elements of T without zero-initialization; returns nullptr
// on failure or when n == 0.
template <typename T>
inline T* UncheckedMalloc(size_t n) {
  return UncheckedRealloc<T>(nullptr, n);
}
366
// Zero-initialized allocation of n elements of T. Returns nullptr for
// zero-sized requests (normalizing calloc's implementation-defined
// behavior) or on failure. The MultiplyWithOverflowCheck call aborts on
// size overflow before calloc is reached.
template <typename T>
inline T* UncheckedCalloc(size_t n) {
  if (MultiplyWithOverflowCheck(sizeof(T), n) == 0) return nullptr;
  return static_cast<T*>(calloc(n, sizeof(T)));
}
372
// Checked variant of UncheckedRealloc: aborts if allocation of a
// non-zero size fails.
template <typename T>
inline T* Realloc(T* pointer, size_t n) {
  T* ret = UncheckedRealloc(pointer, n);
  CHECK_IMPLIES(n > 0, ret != nullptr);
  return ret;
}
379
// Checked variant of UncheckedMalloc: aborts if allocation of a
// non-zero size fails.
template <typename T>
inline T* Malloc(size_t n) {
  T* ret = UncheckedMalloc<T>(n);
  CHECK_IMPLIES(n > 0, ret != nullptr);
  return ret;
}
386
// Checked variant of UncheckedCalloc: aborts if allocation of a
// non-zero size fails.
template <typename T>
inline T* Calloc(size_t n) {
  T* ret = UncheckedCalloc<T>(n);
  CHECK_IMPLIES(n > 0, ret != nullptr);
  return ret;
}
393
// Shortcuts for char*: non-template overloads forwarding to the
// corresponding <char> instantiations above.
inline char* Malloc(size_t n) { return Malloc<char>(n); }
inline char* Calloc(size_t n) { return Calloc<char>(n); }
inline char* UncheckedMalloc(size_t n) { return UncheckedMalloc<char>(n); }
inline char* UncheckedCalloc(size_t n) { return UncheckedCalloc<char>(n); }
399
400 // This is a helper in the .cc file so including util-inl.h doesn't include more
401 // headers than we really need to.
402 void ThrowErrStringTooLong(v8::Isolate* isolate);
403
// Converts a UTF-8 string to a v8 String. Throws (via
// ThrowErrStringTooLong) and returns an empty MaybeLocal if the string
// exceeds V8's maximum string length.
v8::MaybeLocal<v8::Value> ToV8Value(v8::Local<v8::Context> context,
                                    std::string_view str,
                                    v8::Isolate* isolate) {
  if (isolate == nullptr) isolate = context->GetIsolate();
  if (UNLIKELY(str.size() >= static_cast<size_t>(v8::String::kMaxLength))) {
    // V8 only has a TODO comment about adding an exception when the maximum
    // string size is exceeded.
    ThrowErrStringTooLong(isolate);
    return v8::MaybeLocal<v8::Value>();
  }

  return v8::String::NewFromUtf8(
             isolate, str.data(), v8::NewStringType::kNormal, str.size())
      .FromMaybe(v8::Local<v8::String>());
}
419
// Converts a std::vector to a v8 Array by converting each element via the
// appropriate ToV8Value overload. Returns an empty MaybeLocal if any
// element conversion fails.
template <typename T>
v8::MaybeLocal<v8::Value> ToV8Value(v8::Local<v8::Context> context,
                                    const std::vector<T>& vec,
                                    v8::Isolate* isolate) {
  if (isolate == nullptr) isolate = context->GetIsolate();
  v8::EscapableHandleScope handle_scope(isolate);

  // Element handles are collected on the stack (up to 128) before the
  // Array is materialized in one call.
  MaybeStackBuffer<v8::Local<v8::Value>, 128> arr(vec.size());
  arr.SetLength(vec.size());
  for (size_t i = 0; i < vec.size(); ++i) {
    if (!ToV8Value(context, vec[i], isolate).ToLocal(&arr[i]))
      return v8::MaybeLocal<v8::Value>();
  }

  return handle_scope.Escape(v8::Array::New(isolate, arr.out(), arr.length()));
}
436
// Converts a std::set to a v8 Set. Note that set_js is deliberately
// created BEFORE the HandleScope so its handle stays valid when the
// scope unwinds and set_js is returned.
template <typename T>
v8::MaybeLocal<v8::Value> ToV8Value(v8::Local<v8::Context> context,
                                    const std::set<T>& set,
                                    v8::Isolate* isolate) {
  if (isolate == nullptr) isolate = context->GetIsolate();
  v8::Local<v8::Set> set_js = v8::Set::New(isolate);
  v8::HandleScope handle_scope(isolate);

  for (const T& entry : set) {
    v8::Local<v8::Value> value;
    if (!ToV8Value(context, entry, isolate).ToLocal(&value))
      return {};
    // Add() returns empty on exception (e.g. termination).
    if (set_js->Add(context, value).IsEmpty())
      return {};
  }

  return set_js;
}
455
// Converts a std::unordered_map to a v8 Map, converting keys and values
// through the matching ToV8Value overloads. Returns an empty MaybeLocal
// if any conversion or Map::Set fails.
template <typename T, typename U>
v8::MaybeLocal<v8::Value> ToV8Value(v8::Local<v8::Context> context,
                                    const std::unordered_map<T, U>& map,
                                    v8::Isolate* isolate) {
  if (isolate == nullptr) isolate = context->GetIsolate();
  v8::EscapableHandleScope handle_scope(isolate);

  v8::Local<v8::Map> ret = v8::Map::New(isolate);
  for (const auto& item : map) {
    v8::Local<v8::Value> first, second;
    if (!ToV8Value(context, item.first, isolate).ToLocal(&first) ||
        !ToV8Value(context, item.second, isolate).ToLocal(&second) ||
        ret->Set(context, first, second).IsEmpty()) {
      return v8::MaybeLocal<v8::Value>();
    }
  }

  return handle_scope.Escape(ret);
}
475
// Converts a numeric value to the narrowest fitting v8 type. The unnamed
// second template parameter is the SFINAE enabler from the declaration in
// util.h restricting this overload to arithmetic types.
template <typename T, typename >
v8::MaybeLocal<v8::Value> ToV8Value(v8::Local<v8::Context> context,
                                    const T& number,
                                    v8::Isolate* isolate) {
  if (isolate == nullptr) isolate = context->GetIsolate();

  using Limits = std::numeric_limits<T>;
  // Choose Uint32, Int32, or Double depending on range checks.
  // These checks should all collapse at compile time.
  if (static_cast<uint32_t>(Limits::max()) <=
          std::numeric_limits<uint32_t>::max() &&
      static_cast<uint32_t>(Limits::min()) >=
          std::numeric_limits<uint32_t>::min() && Limits::is_exact) {
    return v8::Integer::NewFromUnsigned(isolate, static_cast<uint32_t>(number));
  }

  if (static_cast<int32_t>(Limits::max()) <=
          std::numeric_limits<int32_t>::max() &&
      static_cast<int32_t>(Limits::min()) >=
          std::numeric_limits<int32_t>::min() && Limits::is_exact) {
    return v8::Integer::New(isolate, static_cast<int32_t>(number));
  }

  // Non-exact (floating) or wider-than-32-bit types fall through to Double.
  return v8::Number::New(isolate, static_cast<double>(number));
}
501
// Copies args[start..end] into this buffer so the slice can be forwarded
// to another function invocation. Leaves the buffer empty when start is
// past the end of the argument list.
SlicedArguments::SlicedArguments(
    const v8::FunctionCallbackInfo<v8::Value>& args, size_t start) {
  const size_t length = static_cast<size_t>(args.Length());
  if (start >= length) return;
  const size_t size = length - start;

  AllocateSufficientStorage(size);
  for (size_t i = 0; i < size; ++i)
    (*this)[i] = args[i + start];
}
512
// Grows the buffer so it can hold at least `storage` elements, switching
// from the inline stack storage to heap storage on first growth past the
// stack capacity. Existing contents are preserved via memcpy (assumes T
// is trivially copyable — consistent with the Realloc use here; confirm
// against MaybeStackBuffer's documented contract in util.h). Capacity
// never shrinks; length_ is always set to `storage`.
template <typename T, size_t kStackStorageSize>
void MaybeStackBuffer<T, kStackStorageSize>::AllocateSufficientStorage(
    size_t storage) {
  CHECK(!IsInvalidated());
  if (storage > capacity()) {
    bool was_allocated = IsAllocated();
    T* allocated_ptr = was_allocated ? buf_ : nullptr;
    buf_ = Realloc(allocated_ptr, storage);
    capacity_ = storage;
    // First transition to the heap: copy the data out of stack storage.
    if (!was_allocated && length_ > 0)
      memcpy(buf_, buf_st_, length_ * sizeof(buf_[0]));
  }

  length_ = storage;
}
528
// Reads the contents of any buffer-like value (ArrayBufferView,
// ArrayBuffer, or SharedArrayBuffer).
template <typename T, size_t S>
ArrayBufferViewContents<T, S>::ArrayBufferViewContents(
    v8::Local<v8::Value> value) {
  DCHECK(value->IsArrayBufferView() || value->IsSharedArrayBuffer() ||
         value->IsArrayBuffer());
  ReadValue(value);
}
536
// Object overload: the value must be an ArrayBufferView (CHECK'd even in
// release builds, unlike the DCHECK in the Value overload).
template <typename T, size_t S>
ArrayBufferViewContents<T, S>::ArrayBufferViewContents(
    v8::Local<v8::Object> value) {
  CHECK(value->IsArrayBufferView());
  Read(value.As<v8::ArrayBufferView>());
}
543
// Typed overload: reads directly from a known ArrayBufferView.
template <typename T, size_t S>
ArrayBufferViewContents<T, S>::ArrayBufferViewContents(
    v8::Local<v8::ArrayBufferView> abv) {
  Read(abv);
}
549
// Points data_/length_ at the view's contents. If the view's backing
// ArrayBuffer is already materialized (HasBuffer) — or the data is too
// large for the inline stack storage — reference the buffer directly;
// otherwise copy into stack storage to avoid materializing a buffer.
template <typename T, size_t S>
void ArrayBufferViewContents<T, S>::Read(v8::Local<v8::ArrayBufferView> abv) {
  static_assert(sizeof(T) == 1, "Only supports one-byte data at the moment");
  length_ = abv->ByteLength();
  if (length_ > sizeof(stack_storage_) || abv->HasBuffer()) {
    data_ = static_cast<T*>(abv->Buffer()->Data()) + abv->ByteOffset();
  } else {
    abv->CopyContents(stack_storage_, sizeof(stack_storage_));
    data_ = stack_storage_;
  }
}
561
// Dispatches on the concrete buffer-like type of `buf` and points
// data_/length_ at its contents. Detachment is only tracked for plain
// ArrayBuffers (views go through Read(); SharedArrayBuffers cannot be
// detached).
template <typename T, size_t S>
void ArrayBufferViewContents<T, S>::ReadValue(v8::Local<v8::Value> buf) {
  static_assert(sizeof(T) == 1, "Only supports one-byte data at the moment");
  DCHECK(buf->IsArrayBufferView() || buf->IsSharedArrayBuffer() ||
         buf->IsArrayBuffer());

  if (buf->IsArrayBufferView()) {
    Read(buf.As<v8::ArrayBufferView>());
  } else if (buf->IsArrayBuffer()) {
    auto ab = buf.As<v8::ArrayBuffer>();
    length_ = ab->ByteLength();
    data_ = static_cast<T*>(ab->Data());
    was_detached_ = ab->WasDetached();
  } else {
    CHECK(buf->IsSharedArrayBuffer());
    auto sab = buf.As<v8::SharedArrayBuffer>();
    length_ = sab->ByteLength();
    data_ = static_cast<T*>(sab->Data());
  }
}
582
// ECMA262 20.1.2.5 (Number.isSafeInteger): true iff v is a Number whose
// value is integral and whose magnitude is at most 2**53 - 1
// (kMaxSafeJsInteger).
inline bool IsSafeJsInt(v8::Local<v8::Value> v) {
  if (!v->IsNumber()) return false;
  double v_d = v.As<v8::Number>()->Value();
  if (std::isnan(v_d)) return false;
  if (std::isinf(v_d)) return false;
  if (std::trunc(v_d) != v_d) return false;  // not int
  if (std::abs(v_d) <= static_cast<double>(kMaxSafeJsInteger)) return true;
  return false;
}
593
HashImpl(std::string_view str)594 constexpr size_t FastStringKey::HashImpl(std::string_view str) {
595 // Low-quality hash (djb2), but just fine for current use cases.
596 size_t h = 5381;
597 for (const char c : str) {
598 h = h * 33 + c;
599 }
600 return h;
601 }
602
// Hash functor for unordered containers: returns the hash precomputed at
// construction time, making lookups O(1) without rehashing the string.
constexpr size_t FastStringKey::Hash::operator()(
    const FastStringKey& key) const {
  return key.cached_hash_;
}
607
// Equality compares the underlying string views (not the cached hashes,
// which may collide).
constexpr bool FastStringKey::operator==(const FastStringKey& other) const {
  return name_ == other.name_;
}
611
// Stores the name and precomputes its hash once, at construction
// (usable in constexpr context).
constexpr FastStringKey::FastStringKey(std::string_view name)
    : name_(name), cached_hash_(HashImpl(name)) {}
614
// Returns the wrapped name as a string_view (non-owning).
constexpr std::string_view FastStringKey::as_string_view() const {
  return name_;
}
618
619 } // namespace node
620
621 #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS
622
623 #endif // SRC_UTIL_INL_H_
624