// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Unfortunately, MutableSlice<> is currently not a subtype of ConstSlice.
// This would require struct subtyping, which is not yet supported.
type MutableSlice<T: type> extends torque_internal::Slice<T, &T>;
type ConstSlice<T: type> extends torque_internal::Slice<T, const &T>;

macro Subslice<T: type>(slice: ConstSlice<T>, start: intptr, length: intptr):
    ConstSlice<T> labels OutOfBounds {
  if (Unsigned(length) > Unsigned(slice.length)) goto OutOfBounds;
  if (Unsigned(start) > Unsigned(slice.length - length)) goto OutOfBounds;
  const offset = slice.offset + torque_internal::TimesSizeOf<T>(start);
  return torque_internal::unsafe::NewConstSlice<T>(
      slice.object, offset, length);
}
macro Subslice<T: type>(slice: MutableSlice<T>, start: intptr, length: intptr):
    MutableSlice<T> labels OutOfBounds {
  if (Unsigned(length) > Unsigned(slice.length)) goto OutOfBounds;
  if (Unsigned(start) > Unsigned(slice.length - length)) goto OutOfBounds;
  const offset = slice.offset + torque_internal::TimesSizeOf<T>(start);
  return torque_internal::unsafe::NewMutableSlice<T>(
      slice.object, offset, length);
}

namespace unsafe {

macro AddOffset<T: type>(ref: &T, offset: intptr): &T {
  return torque_internal::unsafe::NewReference<T>(
      ref.object, ref.offset + torque_internal::TimesSizeOf<T>(offset));
}

macro AddOffset<T: type>(ref: const &T, offset: intptr): const &T {
  return torque_internal::unsafe::NewReference<T>(
      ref.object, ref.offset + torque_internal::TimesSizeOf<T>(offset));
}

}  // namespace unsafe
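// Example (hypothetical usage, for illustration only): taking a
// bounds-checked view of elements [2, 7) of a ConstSlice<char8> `chars`,
// where `Bailout` is a label in the calling macro:
//
//   const sub: ConstSlice<char8> = Subslice(chars, 2, 5) otherwise Bailout;
//
// The two unsigned comparisons in Subslice reject any (start, length) pair
// that would exceed the parent slice, including negative inputs, which wrap
// around to large unsigned values.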
namespace torque_internal {
// Unsafe is a marker that we require to be passed when calling internal APIs
// that might lead to unsoundness when used incorrectly. Unsafe markers should
// therefore not be instantiated anywhere outside of this namespace.
struct Unsafe {}

// Size of a type in memory (on the heap). For class types, this is the size
// of the pointer, not of the instance.
intrinsic %SizeOf<T: type>(): constexpr int31;

macro TimesSizeOf<T: type>(i: intptr): intptr {
  return i * %SizeOf<T>();
}

struct Reference<T: type> {
  const object: HeapObject|TaggedZeroPattern;
  const offset: intptr;
  unsafeMarker: Unsafe;
}
type ConstReference<T: type> extends Reference<T>;
type MutableReference<T: type> extends ConstReference<T>;

namespace unsafe {
macro NewReference<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr): &T {
  return %RawDownCast<&T>(
      Reference<T>{object: object, offset: offset, unsafeMarker: Unsafe {}});
}
macro NewOffHeapReference<T: type>(ptr: RawPtr<T>): &T {
  return %RawDownCast<&T>(Reference<T>{
    object: kZeroBitPattern,
    offset: Convert<intptr>(Convert<RawPtr>(ptr)) + kHeapObjectTag,
    unsafeMarker: Unsafe {}
  });
}
macro ReferenceCast<T: type, U: type>(ref: &U): &T {
  const ref = NewReference<T>(ref.object, ref.offset);
  UnsafeCast<T>(*ref);
  return ref;
}

extern macro GCUnsafeReferenceToRawPtr(
    HeapObject|TaggedZeroPattern, intptr): RawPtr;

}  // namespace unsafe

struct Slice<T: type, Reference: type> {
  macro TryAtIndex(index: intptr): Reference labels OutOfBounds {
    if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) {
      return this.UncheckedAtIndex(index);
    } else {
      goto OutOfBounds;
    }
  }
  macro UncheckedAtIndex(index: intptr): Reference {
    return unsafe::NewReference<T>(
        this.object, this.offset + TimesSizeOf<T>(index));
  }

  macro AtIndex(index: intptr): Reference {
    return this.TryAtIndex(index) otherwise unreachable;
  }

  macro AtIndex(index: uintptr): Reference {
    return this.TryAtIndex(Convert<intptr>(index)) otherwise unreachable;
  }

  macro AtIndex(index: constexpr IntegerLiteral): Reference {
    return this.AtIndex(FromConstexpr<uintptr>(index));
  }

  macro AtIndex(index: constexpr int31): Reference {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  macro AtIndex(index: Smi): Reference {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  macro Iterator(): SliceIterator<T, Reference> {
    const end = this.offset + TimesSizeOf<T>(this.length);
    return SliceIterator<T, Reference>{
      object: this.object,
      start: this.offset,
      end: end,
      unsafeMarker: Unsafe {}
    };
  }
  macro Iterator(
      startIndex: intptr, endIndex: intptr): SliceIterator<T, Reference> {
    check(
        Convert<uintptr>(endIndex) <= Convert<uintptr>(this.length) &&
        Convert<uintptr>(startIndex) <= Convert<uintptr>(endIndex));
    const start = this.offset + TimesSizeOf<T>(startIndex);
    const end = this.offset + TimesSizeOf<T>(endIndex);
    return SliceIterator<T, Reference>{
      object: this.object,
      start,
      end,
      unsafeMarker: Unsafe {}
    };
  }

  // WARNING: This can return a raw pointer into the heap, which is not
  // GC-safe.
  macro GCUnsafeStartPointer(): RawPtr<T> {
    return %RawDownCast<RawPtr<T>>(
        unsafe::GCUnsafeReferenceToRawPtr(this.object, this.offset));
  }

  const object: HeapObject|TaggedZeroPattern;
  const offset: intptr;
  const length: intptr;
  unsafeMarker: Unsafe;
}
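// Example (hypothetical usage, for illustration only): reading and writing
// through a MutableSlice<Smi> `s`, using the same iteration pattern as
// InitializeFieldsFromIterator below:
//
//   const first: Smi = *s.AtIndex(0);  // Bounds-checked element access.
//   let it = s.Iterator();
//   while (true) {
//     const ref: &Smi = it.NextReference() otherwise break;
//     *ref = first;
//   }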
namespace unsafe {

macro NewMutableSlice<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr,
    length: intptr): MutableSlice<T> {
  return %RawDownCast<MutableSlice<T>>(Slice<T, &T>{
    object: object,
    offset: offset,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

macro NewConstSlice<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr,
    length: intptr): ConstSlice<T> {
  return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
    object: object,
    offset: offset,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

macro NewOffHeapConstSlice<T: type>(
    startPointer: RawPtr<T>, length: intptr): ConstSlice<T> {
  return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
    object: kZeroBitPattern,
    offset: Convert<intptr>(Convert<RawPtr>(startPointer)) + kHeapObjectTag,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

}  // namespace unsafe

struct SliceIterator<T: type, Reference: type> {
  macro Empty(): bool {
    return this.start == this.end;
  }

  macro Next(): T labels NoMore {
    return *this.NextReference() otherwise NoMore;
  }

  macro NextReference(): Reference labels NoMore {
    if (this.Empty()) {
      goto NoMore;
    } else {
      const result = unsafe::NewReference<T>(this.object, this.start);
      this.start += %SizeOf<T>();
      return result;
    }
  }

  object: HeapObject|TaggedZeroPattern;
  start: intptr;
  end: intptr;
  unsafeMarker: Unsafe;
}

macro AddIndexedFieldSizeToObjectSize(
    baseSize: intptr, arrayLength: intptr, fieldSize: constexpr int32): intptr {
  const arrayLength = Convert<int32>(arrayLength);
  const byteLength = TryInt32Mul(arrayLength, fieldSize)
      otherwise unreachable;
  return TryIntPtrAdd(baseSize, Convert<intptr>(byteLength))
      otherwise unreachable;
}
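// Worked example (hypothetical sizes): for an object with a 16-byte header
// and 4 tagged (8-byte) indexed fields,
// AddIndexedFieldSizeToObjectSize(16, 4, 8) returns 16 + 4 * 8 == 48. The
// TryInt32Mul/TryIntPtrAdd calls make an overflowing size computation trap
// as unreachable instead of silently wrapping.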
macro AlignTagged(x: intptr): intptr {
  // Round up to a multiple of kTaggedSize.
  return (x + kObjectAlignmentMask) & ~kObjectAlignmentMask;
}

macro IsTaggedAligned(x: intptr): bool {
  return (x & kObjectAlignmentMask) == 0;
}

macro ValidAllocationSize(sizeInBytes: intptr, map: Map): bool {
  if (sizeInBytes <= 0) return false;
  if (!IsTaggedAligned(sizeInBytes)) return false;
  const instanceSizeInWords = Convert<intptr>(map.instance_size_in_words);
  return instanceSizeInWords == kVariableSizeSentinel ||
      instanceSizeInWords * kTaggedSize == sizeInBytes;
}

type UninitializedHeapObject extends HeapObject;

extern macro GetInstanceTypeMap(constexpr InstanceType): Map;
extern macro Allocate(
    intptr, constexpr AllocationFlag): UninitializedHeapObject;

const kAllocateBaseFlags: constexpr AllocationFlag =
    AllocationFlag::kAllowLargeObjectAllocation;
macro AllocateFromNew(
    sizeInBytes: intptr, map: Map, pretenured: bool): UninitializedHeapObject {
  dcheck(ValidAllocationSize(sizeInBytes, map));
  if (pretenured) {
    return Allocate(
        sizeInBytes,
        %RawConstexprCast<constexpr AllocationFlag>(
            %RawConstexprCast<constexpr int32>(kAllocateBaseFlags) |
            %RawConstexprCast<constexpr int32>(AllocationFlag::kPretenured)));
  } else {
    return Allocate(sizeInBytes, kAllocateBaseFlags);
  }
}

macro InitializeFieldsFromIterator<T: type, Iterator: type>(
    target: MutableSlice<T>, originIterator: Iterator): void {
  let targetIterator = target.Iterator();
  let originIterator = originIterator;
  while (true) {
    const ref: &T = targetIterator.NextReference() otherwise break;
    *ref = originIterator.Next() otherwise unreachable;
  }
}
// Dummy implementations: do not initialize for UninitializedIterator.
InitializeFieldsFromIterator<char8, UninitializedIterator>(
    _target: MutableSlice<char8>,
    _originIterator: UninitializedIterator): void {}
InitializeFieldsFromIterator<char16, UninitializedIterator>(
    _target: MutableSlice<char16>,
    _originIterator: UninitializedIterator): void {}

extern macro IsDoubleHole(HeapObject, intptr): bool;
extern macro StoreDoubleHole(HeapObject, intptr): void;

macro LoadFloat64OrHole(r: &float64_or_hole): float64_or_hole {
  return float64_or_hole{
    is_hole: IsDoubleHole(
        %RawDownCast<HeapObject>(r.object), r.offset - kHeapObjectTag),
    value: *unsafe::NewReference<float64>(r.object, r.offset)
  };
}
macro StoreFloat64OrHole(r: &float64_or_hole, value: float64_or_hole): void {
  if (value.is_hole) {
    StoreDoubleHole(
        %RawDownCast<HeapObject>(r.object), r.offset - kHeapObjectTag);
  } else {
    *unsafe::NewReference<float64>(r.object, r.offset) = value.value;
  }
}

macro DownCastForTorqueClass<T : type extends HeapObject>(o: HeapObject):
    T labels CastError {
  const map = o.map;
  const minInstanceType = %MinInstanceType<T>();
  const maxInstanceType = %MaxInstanceType<T>();
  if constexpr (minInstanceType == maxInstanceType) {
    if constexpr (%ClassHasMapConstant<T>()) {
      if (map != %GetClassMapConstant<T>()) goto CastError;
    } else {
      if (map.instance_type != minInstanceType) goto CastError;
    }
  } else {
    const diff: int32 = maxInstanceType - minInstanceType;
    const offset = Convert<int32>(Convert<uint16>(map.instance_type)) -
        Convert<int32>(Convert<uint16>(
            FromConstexpr<InstanceType>(minInstanceType)));
    if (Unsigned(offset) > Unsigned(diff)) goto CastError;
  }
  return %RawDownCast<T>(o);
}
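// Worked example (hypothetical instance-type values): if T covers the
// contiguous range [minInstanceType, maxInstanceType] == [100, 103], then
// diff == 3. An object with instance_type 102 yields offset == 2, and the
// single unsigned comparison 2 <= 3 admits the cast; instance_type 99 yields
// offset == -1, which wraps to a large unsigned value, so the same
// comparison rejects values below and above the range alike.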
extern macro StaticAssert(bool, constexpr string): void;

// This is for the implementation of the dot operator. In any context where
// the dot operator is available, the correct way to get the length of an
// indexed field x from object o is `(&o.x).length`.
intrinsic %IndexedFieldLength<T: type>(o: T, f: constexpr string): intptr;

// If field x is defined as optional, then &o.x returns a reference to the
// field or crashes the program (unreachable) if the field is not present.
// Usually that's the most convenient behavior, but in rare cases such as the
// implementation of the dot operator, we may instead need to get a Slice to
// the optional field, which is either length zero or one depending on
// whether the field is present. This intrinsic provides Slices for both
// indexed fields (equivalent to &o.x) and optional fields.
intrinsic %FieldSlice<T: type, TSlice: type>(
    o: T, f: constexpr string): TSlice;

extern macro GetPendingMessage(): TheHole|JSMessageObject;
extern macro SetPendingMessage(TheHole|JSMessageObject): void;

// This is implicitly performed at the beginning of Torque catch-blocks.
macro GetAndResetPendingMessage(): TheHole|JSMessageObject {
  const message = GetPendingMessage();
  SetPendingMessage(TheHole);
  return message;
}

}  // namespace torque_internal

// Indicates that an array-field should not be initialized.
// For safety reasons, this is only allowed for untagged types.
struct UninitializedIterator {}

// %RawDownCast should *never* be used anywhere in Torque code except for
// in Torque-based UnsafeCast operators preceded by an appropriate
// type dcheck().
intrinsic %RawDownCast<To: type, From: type>(x: From): To;
intrinsic %RawConstexprCast<To: type, From: type>(f: From): To;

intrinsic %MinInstanceType<T: type>(): constexpr InstanceType;
intrinsic %MaxInstanceType<T: type>(): constexpr InstanceType;

intrinsic %ClassHasMapConstant<T: type>(): constexpr bool;
intrinsic %GetClassMapConstant<T: type>(): Map;

struct IteratorSequence<T: type, FirstIterator: type, SecondIterator: type> {
  macro Empty(): bool {
    return this.first.Empty() && this.second.Empty();
  }

  macro Next(): T labels NoMore {
    return this.first.Next()
        otherwise return (this.second.Next() otherwise NoMore);
  }

  first: FirstIterator;
  second: SecondIterator;
}

macro IteratorSequence<T: type, FirstIterator: type, SecondIterator: type>(
    first: FirstIterator, second: SecondIterator):
    IteratorSequence<T, FirstIterator, SecondIterator> {
  return IteratorSequence<T>{first, second};
}
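// Example (hypothetical usage, for illustration only): chaining the
// iterators of two slices `a` and `b` of the same element type into a
// single stream:
//
//   let it = IteratorSequence<Smi>(a.Iterator(), b.Iterator());
//   while (true) {
//     const element: Smi = it.Next() otherwise break;
//     // ... consume element ...
//   }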