// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

namespace torque_internal {
// Unsafe is a marker that we require to be passed when calling internal APIs
// that might lead to unsoundness when used incorrectly. Unsafe markers should
// therefore not be instantiated anywhere outside of this namespace.
struct Unsafe {}

// Size of a type in memory (on the heap). For class types, this is the size
// of the pointer, not of the instance.
intrinsic %SizeOf<T: type>(): constexpr int31;
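// For example, %SizeOf<float64>() is 8, whereas for a class type such as
// JSArray it is the size of a tagged pointer, not of the JSArray instance.
// (Illustrative note; the pointer size depends on the build configuration.)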

struct Reference<T: type> {
  const object: HeapObject;
  const offset: intptr;
  unsafeMarker: Unsafe;
}
type ConstReference<T: type> extends Reference<T>;
type MutableReference<T: type> extends ConstReference<T>;
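
// Note: the `&T` syntax used throughout this file denotes a reference to T,
// backed by the Reference<T> struct family above (see NewReference below).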

namespace unsafe {
macro NewReference<T: type>(object: HeapObject, offset: intptr):&T {
  return %RawDownCast<&T>(
      Reference<T>{object: object, offset: offset, unsafeMarker: Unsafe {}});
}
macro ReferenceCast<T: type, U: type>(ref:&U):&T {
  const ref = NewReference<T>(ref.object, ref.offset);
  // The result of this cast is discarded; the call exists to trigger the
  // type assert that UnsafeCast performs in debug builds.
  UnsafeCast<T>(*ref);
  return ref;
}
}  // namespace unsafe

struct Slice<T: type> {
  macro TryAtIndex(index: intptr):&T labels OutOfBounds {
    // The single unsigned comparison also rejects negative indices, which
    // wrap around to large uintptr values.
    if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) {
      return unsafe::NewReference<T>(
          this.object, this.offset + index * %SizeOf<T>());
    } else {
      goto OutOfBounds;
    }
  }

  macro AtIndex(index: intptr):&T {
    return this.TryAtIndex(index) otherwise unreachable;
  }

  macro AtIndex(index: uintptr):&T {
    return this.TryAtIndex(Convert<intptr>(index)) otherwise unreachable;
  }

  macro AtIndex(index: constexpr int31):&T {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  macro AtIndex(index: Smi):&T {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  macro Iterator(): SliceIterator<T> {
    const end = this.offset + this.length * %SizeOf<T>();
    return SliceIterator<T>{
      object: this.object,
      start: this.offset,
      end: end,
      unsafeMarker: Unsafe {}
    };
  }
  macro Iterator(startIndex: intptr, endIndex: intptr): SliceIterator<T> {
    check(
        Convert<uintptr>(endIndex) <= Convert<uintptr>(this.length) &&
        Convert<uintptr>(startIndex) <= Convert<uintptr>(endIndex));
    const start = this.offset + startIndex * %SizeOf<T>();
    const end = this.offset + endIndex * %SizeOf<T>();
    return SliceIterator<T>{
      object: this.object,
      start,
      end,
      unsafeMarker: Unsafe {}
    };
  }

  const object: HeapObject;
  const offset: intptr;
  const length: intptr;
  unsafeMarker: Unsafe;
}

macro UnsafeNewSlice<T: type>(
    object: HeapObject, offset: intptr, length: intptr): Slice<T> {
  return Slice<T>{
    object: object,
    offset: offset,
    length: length,
    unsafeMarker: Unsafe {}
  };
}
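
// Illustrative sketch (hypothetical helper, not part of the original file):
// summing the elements of a Slice<Smi> via the iterator protocol defined
// below. SumOfSmiSlice is an assumed name.
macro SumOfSmiSlice(slice: Slice<Smi>): intptr {
  let sum: intptr = 0;
  let it = slice.Iterator();
  while (true) {
    const element = it.Next() otherwise break;
    sum += Convert<intptr>(element);
  }
  return sum;
}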

struct SliceIterator<T: type> {
  macro Empty(): bool {
    return this.start == this.end;
  }

  macro Next(): T labels NoMore {
    return *this.NextReference() otherwise NoMore;
  }

  macro NextReference():&T labels NoMore {
    if (this.Empty()) {
      goto NoMore;
    } else {
      const result = unsafe::NewReference<T>(this.object, this.start);
      this.start += %SizeOf<T>();
      return result;
    }
  }

  object: HeapObject;
  start: intptr;
  end: intptr;
  unsafeMarker: Unsafe;
}
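
// Illustrative sketch (hypothetical helper, not part of the original file):
// filling a Slice<Smi> by writing through the references produced by its
// iterator.
macro FillSmiSlice(slice: Slice<Smi>, value: Smi) {
  let it = slice.Iterator();
  while (true) {
    const ref = it.NextReference() otherwise break;
    *ref = value;
  }
}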

macro AddIndexedFieldSizeToObjectSize(
    baseSize: intptr, arrayLength: intptr, fieldSize: constexpr int32): intptr {
  const arrayLength = Convert<int32>(arrayLength);
  const byteLength = TryInt32Mul(arrayLength, fieldSize)
      otherwise unreachable;
  return TryIntPtrAdd(baseSize, Convert<intptr>(byteLength))
      otherwise unreachable;
}

macro AlignTagged(x: intptr): intptr {
  // Round up to a multiple of kTaggedSize.
  return (x + kObjectAlignmentMask) & ~kObjectAlignmentMask;
}

macro IsTaggedAligned(x: intptr): bool {
  return (x & kObjectAlignmentMask) == 0;
}
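
// Worked example (illustrative; assumes kTaggedSize == 8 and hence
// kObjectAlignmentMask == 7): AlignTagged(13) == (13 + 7) & ~7 == 16, and
// IsTaggedAligned(16) holds while IsTaggedAligned(13) does not.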

macro ValidAllocationSize(sizeInBytes: intptr, map: Map): bool {
  if (sizeInBytes <= 0) return false;
  if (!IsTaggedAligned(sizeInBytes)) return false;
  const instanceSizeInWords = Convert<intptr>(map.instance_size_in_words);
  return instanceSizeInWords == kVariableSizeSentinel ||
      instanceSizeInWords * kTaggedSize == sizeInBytes;
}

type UninitializedHeapObject extends HeapObject;

extern macro GetInstanceTypeMap(constexpr InstanceType): Map;
extern macro Allocate(
    intptr, constexpr AllocationFlag): UninitializedHeapObject;

const kAllocateBaseFlags: constexpr AllocationFlag =
    AllocationFlag::kAllowLargeObjectAllocation;
macro AllocateFromNew(
    sizeInBytes: intptr, map: Map, pretenured: bool): UninitializedHeapObject {
  assert(ValidAllocationSize(sizeInBytes, map));
  if (pretenured) {
    return Allocate(
        sizeInBytes,
        %RawConstexprCast<constexpr AllocationFlag>(
            kAllocateBaseFlags | AllocationFlag::kPretenured));
  } else {
    return Allocate(sizeInBytes, kAllocateBaseFlags);
  }
}

macro InitializeFieldsFromIterator<T: type, Iterator: type>(
    target: Slice<T>, originIterator: Iterator) {
  let targetIterator = target.Iterator();
  // Shadow the parameter with a mutable copy so that Next() can advance it.
  let originIterator = originIterator;
  while (true) {
    const ref:&T = targetIterator.NextReference() otherwise break;
    *ref = originIterator.Next() otherwise unreachable;
  }
}
// Dummy implementations: do not initialize for UninitializedIterator.
InitializeFieldsFromIterator<char8, UninitializedIterator>(
    _target: Slice<char8>, _originIterator: UninitializedIterator) {}
InitializeFieldsFromIterator<char16, UninitializedIterator>(
    _target: Slice<char16>, _originIterator: UninitializedIterator) {}
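
// Illustrative sketch (hypothetical helper, not part of the original file):
// copying one slice into another by feeding its iterator to the macro above.
// Assumes source has at least as many elements as target; otherwise the
// `otherwise unreachable` above will crash.
macro CopySliceContents<T: type>(target: Slice<T>, source: Slice<T>) {
  InitializeFieldsFromIterator<T, SliceIterator<T>>(target, source.Iterator());
}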

extern macro IsDoubleHole(HeapObject, intptr): bool;
extern macro StoreDoubleHole(HeapObject, intptr);

// The raw-memory macros above operate on untagged offsets, hence the
// kHeapObjectTag adjustments below.
macro LoadFloat64OrHole(r:&float64_or_hole): float64_or_hole {
  return float64_or_hole{
    is_hole: IsDoubleHole(r.object, r.offset - kHeapObjectTag),
    value: *unsafe::NewReference<float64>(r.object, r.offset)
  };
}
macro StoreFloat64OrHole(r:&float64_or_hole, value: float64_or_hole) {
  if (value.is_hole) {
    StoreDoubleHole(r.object, r.offset - kHeapObjectTag);
  } else {
    *unsafe::NewReference<float64>(r.object, r.offset) = value.value;
  }
}

macro DownCastForTorqueClass<T : type extends HeapObject>(o: HeapObject):
    T labels CastError {
  const map = o.map;
  const minInstanceType = %MinInstanceType<T>();
  const maxInstanceType = %MaxInstanceType<T>();
  if constexpr (minInstanceType == maxInstanceType) {
    if constexpr (%ClassHasMapConstant<T>()) {
      if (map != %GetClassMapConstant<T>()) goto CastError;
    } else {
      if (map.instance_type != minInstanceType) goto CastError;
    }
  } else {
    // Range check with a single unsigned comparison: if instance_type is
    // below minInstanceType, offset is negative and Unsigned(offset) wraps
    // to a value greater than Unsigned(diff).
    const diff: int32 = maxInstanceType - minInstanceType;
    const offset = Convert<int32>(Convert<uint16>(map.instance_type)) -
        Convert<int32>(Convert<uint16>(
            FromConstexpr<InstanceType>(minInstanceType)));
    if (Unsigned(offset) > Unsigned(diff)) goto CastError;
  }
  return %RawDownCast<T>(o);
}

extern macro StaticAssert(bool, constexpr string);

// This is for the implementation of the dot operator. In any context where the
// dot operator is available, the correct way to get the length of an indexed
// field x from object o is `(&o.x).length`.
intrinsic %IndexedFieldLength<T: type>(o: T, f: constexpr string);

}  // namespace torque_internal

// Indicates that an array-field should not be initialized.
// For safety reasons, this is only allowed for untagged types.
struct UninitializedIterator {}

// %RawDownCast should *never* be used anywhere in Torque code except in
// Torque-based UnsafeCast operators preceded by an appropriate type assert().
intrinsic %RawDownCast<To: type, From: type>(x: From): To;
intrinsic %RawConstexprCast<To: type, From: type>(f: From): To;

intrinsic %MinInstanceType<T: type>(): constexpr InstanceType;
intrinsic %MaxInstanceType<T: type>(): constexpr InstanceType;

intrinsic %ClassHasMapConstant<T: type>(): constexpr bool;
intrinsic %GetClassMapConstant<T: type>(): Map;

struct IteratorSequence<T: type, FirstIterator: type, SecondIterator: type> {
  macro Empty(): bool {
    return this.first.Empty() && this.second.Empty();
  }

  macro Next(): T labels NoMore {
    return this.first.Next()
        otherwise return (this.second.Next() otherwise NoMore);
  }

  first: FirstIterator;
  second: SecondIterator;
}

macro IteratorSequence<T: type, FirstIterator: type, SecondIterator: type>(
    first: FirstIterator, second: SecondIterator):
    IteratorSequence<T, FirstIterator, SecondIterator> {
  return IteratorSequence<T, FirstIterator, SecondIterator>{first, second};
}
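
// Illustrative sketch (hypothetical helper, not part of the original file):
// chaining the iterators of two slices and counting the combined elements.
// Assumes the iterator type arguments can be inferred from the macro
// arguments, with only T given explicitly.
macro CountConcatenated(
    a: torque_internal::Slice<Smi>, b: torque_internal::Slice<Smi>): intptr {
  let it = IteratorSequence<Smi>(a.Iterator(), b.Iterator());
  let count: intptr = 0;
  while (true) {
    const _element = it.Next() otherwise break;
    count += 1;
  }
  return count;
}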