// Copyright 2019 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// NOTE(review): the generic type parameters in this file were stripped by a
// text-mangling step; they are restored here from upstream V8 — verify
// against the original torque-internal.tq.

// Unfortunately, MutableSlice<T> is currently not a subtype of ConstSlice<T>.
// This would require struct subtyping, which is not yet supported.
type MutableSlice<T: type> extends torque_internal::Slice<T, &T>;
type ConstSlice<T: type> extends torque_internal::Slice<T, const &T>;

// Bounds-checked sub-slice of a ConstSlice. The two unsigned comparisons
// together ensure 0 <= start && start + length <= slice.length without
// intermediate overflow.
macro Subslice<T: type>(slice: ConstSlice<T>, start: intptr, length: intptr):
    ConstSlice<T> labels OutOfBounds {
  if (Unsigned(length) > Unsigned(slice.length)) goto OutOfBounds;
  if (Unsigned(start) > Unsigned(slice.length - length)) goto OutOfBounds;
  const offset = slice.offset + torque_internal::TimesSizeOf<T>(start);
  return torque_internal::unsafe::NewConstSlice<T>(
      slice.object, offset, length);
}

// Bounds-checked sub-slice of a MutableSlice; same checks as above.
macro Subslice<T: type>(slice: MutableSlice<T>, start: intptr, length: intptr):
    MutableSlice<T> labels OutOfBounds {
  if (Unsigned(length) > Unsigned(slice.length)) goto OutOfBounds;
  if (Unsigned(start) > Unsigned(slice.length - length)) goto OutOfBounds;
  const offset = slice.offset + torque_internal::TimesSizeOf<T>(start);
  return torque_internal::unsafe::NewMutableSlice<T>(
      slice.object, offset, length);
}

namespace unsafe {

// Advances a reference by `offset` elements (not bytes) of T.
macro AddOffset<T: type>(ref: &T, offset: intptr): &T {
  return torque_internal::unsafe::NewReference<T>(
      ref.object, ref.offset + torque_internal::TimesSizeOf<T>(offset));
}

// Const-reference overload of AddOffset.
macro AddOffset<T: type>(ref: const &T, offset: intptr): const &T {
  return torque_internal::unsafe::NewReference<T>(
      ref.object, ref.offset + torque_internal::TimesSizeOf<T>(offset));
}

}  // namespace unsafe

namespace torque_internal {
// Unsafe is a marker that we require to be passed when calling internal APIs
// that might lead to unsoundness when used incorrectly. Unsafe markers should
// therefore not be instantiated anywhere outside of this namespace.
struct Unsafe {}

// Size of a type in memory (on the heap). For class types, this is the size
// of the pointer, not of the instance.
intrinsic %SizeOf<T: type>(): constexpr int31;

// Converts an element count into a byte offset for element type T.
macro TimesSizeOf<T: type>(i: intptr): intptr {
  return i * %SizeOf<T>();
}

// An (object, byte-offset) pair addressing a T inside a heap object, or
// off-heap when object is the TaggedZeroPattern.
struct Reference<T: type> {
  const object: HeapObject|TaggedZeroPattern;
  const offset: intptr;
  unsafeMarker: Unsafe;
}
type ConstReference<T: type> extends Reference<T>;
type MutableReference<T: type> extends ConstReference<T>;

namespace unsafe {

// Constructs a &T without any bounds or type checking — callers must
// guarantee (object, offset) really addresses a T.
macro NewReference<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr): &T {
  return %RawDownCast<&T>(
      Reference<T>{object: object, offset: offset, unsafeMarker: Unsafe {}});
}

// Wraps a raw off-heap pointer as a reference. kHeapObjectTag is added so
// the usual "offset - kHeapObjectTag" untagging used for on-heap references
// yields the original pointer.
macro NewOffHeapReference<T: type>(ptr: RawPtr<T>): &T {
  return %RawDownCast<&T>(Reference<T>{
    object: kZeroBitPattern,
    offset: Convert<intptr>(Convert<RawPtr>(ptr)) + kHeapObjectTag,
    unsafeMarker: Unsafe {}
  });
}

// Reinterprets a &U as a &T; the UnsafeCast on the loaded value serves as a
// debug-mode type check of the referent.
macro ReferenceCast<T: type, U: type>(ref: &U): &T {
  const ref = NewReference<T>(ref.object, ref.offset);
  UnsafeCast<T>(*ref);
  return ref;
}

extern macro GCUnsafeReferenceToRawPtr(
    HeapObject | TaggedZeroPattern, intptr): RawPtr;

}  // namespace unsafe

// A bounds-checked view of `length` elements of type T starting at
// (object, offset). `Reference` is &T or const &T depending on mutability.
struct Slice<T: type, Reference: type> {
  macro TryAtIndex(index: intptr): Reference labels OutOfBounds {
    // Unsigned comparison rejects negative indices as well as too-large ones.
    if (Convert<uintptr>(index) < Convert<uintptr>(this.length)) {
      return this.UncheckedAtIndex(index);
    } else {
      goto OutOfBounds;
    }
  }

  // No bounds check — callers must have validated `index` already.
  macro UncheckedAtIndex(index: intptr): Reference {
    return unsafe::NewReference<T>(
        this.object, this.offset + TimesSizeOf<T>(index));
  }

  macro AtIndex(index: intptr): Reference {
    return this.TryAtIndex(index) otherwise unreachable;
  }

  macro AtIndex(index: uintptr): Reference {
    return this.TryAtIndex(Convert<intptr>(index)) otherwise unreachable;
  }

  macro AtIndex(index: constexpr IntegerLiteral): Reference {
    return this.AtIndex(FromConstexpr<uintptr>(index));
  }

  macro AtIndex(index: constexpr int31): Reference {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  macro AtIndex(index: Smi): Reference {
    const i: intptr = Convert<intptr>(index);
    return this.TryAtIndex(i) otherwise unreachable;
  }

  // Iterator over the whole slice.
  macro Iterator(): SliceIterator<T, Reference> {
    const end = this.offset + TimesSizeOf<T>(this.length);
    return SliceIterator<T, Reference>{
      object: this.object,
      start: this.offset,
      end: end,
      unsafeMarker: Unsafe {}
    };
  }

  // Iterator over the half-open element range [startIndex, endIndex).
  macro Iterator(startIndex: intptr, endIndex: intptr):
      SliceIterator<T, Reference> {
    check(
        Convert<uintptr>(endIndex) <= Convert<uintptr>(this.length) &&
        Convert<uintptr>(startIndex) <= Convert<uintptr>(endIndex));
    const start = this.offset + TimesSizeOf<T>(startIndex);
    const end = this.offset + TimesSizeOf<T>(endIndex);
    return SliceIterator<T, Reference>{
      object: this.object,
      start: start,
      end: end,
      unsafeMarker: Unsafe {}
    };
  }

  // WARNING: This can return a raw pointer into the heap, which is not
  // GC-safe.
  macro GCUnsafeStartPointer(): RawPtr<T> {
    return %RawDownCast<RawPtr<T>>(
        unsafe::GCUnsafeReferenceToRawPtr(this.object, this.offset));
  }

  const object: HeapObject|TaggedZeroPattern;
  const offset: intptr;
  const length: intptr;
  unsafeMarker: Unsafe;
}

namespace unsafe {

macro NewMutableSlice<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr,
    length: intptr): MutableSlice<T> {
  return %RawDownCast<MutableSlice<T>>(Slice<T, &T>{
    object: object,
    offset: offset,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

macro NewConstSlice<T: type>(
    object: HeapObject|TaggedZeroPattern, offset: intptr,
    length: intptr): ConstSlice<T> {
  return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
    object: object,
    offset: offset,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

// Off-heap data is addressed with the zero bit pattern as the "object" and
// a tagged pointer value as the offset (see NewOffHeapReference).
macro NewOffHeapConstSlice<T: type>(
    startPointer: RawPtr<T>, length: intptr): ConstSlice<T> {
  return %RawDownCast<ConstSlice<T>>(Slice<T, const &T>{
    object: kZeroBitPattern,
    offset: Convert<intptr>(Convert<RawPtr>(startPointer)) + kHeapObjectTag,
    length: length,
    unsafeMarker: Unsafe {}
  });
}

}  // namespace unsafe

// Forward iterator over a Slice; `start` advances by sizeof(T) per element
// until it reaches `end`.
struct SliceIterator<T: type, Reference: type> {
  macro Empty(): bool {
    return this.start == this.end;
  }

  macro Next(): T labels NoMore {
    return *this.NextReference() otherwise NoMore;
  }

  macro NextReference(): Reference labels NoMore {
    if (this.Empty()) {
      goto NoMore;
    } else {
      const result = unsafe::NewReference<T>(this.object, this.start);
      this.start += %SizeOf<T>();
      return result;
    }
  }

  object: HeapObject|TaggedZeroPattern;
  start: intptr;
  end: intptr;
  unsafeMarker: Unsafe;
}

// Computes baseSize + arrayLength * fieldSize with overflow checks on both
// the multiplication and the addition.
macro AddIndexedFieldSizeToObjectSize(
    baseSize: intptr, arrayLength: intptr,
    fieldSize: constexpr int32): intptr {
  const arrayLength = Convert<int32>(arrayLength);
  const byteLength = TryInt32Mul(arrayLength, fieldSize)
      otherwise unreachable;
  return TryIntPtrAdd(baseSize, Convert<intptr>(byteLength))
      otherwise unreachable;
}

macro AlignTagged(x: intptr): intptr {
  // Round up to a multiple of kTaggedSize.
  return (x + kObjectAlignmentMask) & ~kObjectAlignmentMask;
}

macro IsTaggedAligned(x: intptr): bool {
  return (x & kObjectAlignmentMask) == 0;
}

// A size is valid if it is positive, tagged-aligned, and consistent with the
// map's instance size (unless the map marks the size as variable).
macro ValidAllocationSize(sizeInBytes: intptr, map: Map): bool {
  if (sizeInBytes <= 0) return false;
  if (!IsTaggedAligned(sizeInBytes)) return false;
  const instanceSizeInWords = Convert<intptr>(map.instance_size_in_words);
  return instanceSizeInWords == kVariableSizeSentinel ||
      instanceSizeInWords * kTaggedSize == sizeInBytes;
}

type UninitializedHeapObject extends HeapObject;

extern macro GetInstanceTypeMap(constexpr InstanceType): Map;
extern macro Allocate(
    intptr, constexpr AllocationFlag): UninitializedHeapObject;

const kAllocateBaseFlags: constexpr AllocationFlag =
    AllocationFlag::kAllowLargeObjectAllocation;

// Allocates sizeInBytes on the heap, optionally pretenured (old space). The
// AllocationFlag bit-or has to go through int32 because constexpr enums do
// not support '|' directly.
macro AllocateFromNew(
    sizeInBytes: intptr, map: Map, pretenured: bool): UninitializedHeapObject {
  dcheck(ValidAllocationSize(sizeInBytes, map));
  if (pretenured) {
    return Allocate(
        sizeInBytes,
        %RawConstexprCast<constexpr AllocationFlag>(
            %RawConstexprCast<constexpr int32>(kAllocateBaseFlags) |
            %RawConstexprCast<constexpr int32>(AllocationFlag::kPretenured)));
  } else {
    return Allocate(sizeInBytes, kAllocateBaseFlags);
  }
}

// Copies elements from originIterator into target until the target slice is
// exhausted; the origin must provide at least as many elements.
macro InitializeFieldsFromIterator<T: type, Iterator: type>(
    target: MutableSlice<T>, originIterator: Iterator): void {
  let targetIterator = target.Iterator();
  let originIterator = originIterator;
  while (true) {
    const ref: &T = targetIterator.NextReference() otherwise break;
    *ref = originIterator.Next() otherwise unreachable;
  }
}
// Dummy implementations: do not initialize for UninitializedIterator.
InitializeFieldsFromIterator<char8, UninitializedIterator>(
    _target: MutableSlice<char8>,
    _originIterator: UninitializedIterator): void {}
InitializeFieldsFromIterator<char16, UninitializedIterator>(
    _target: MutableSlice<char16>,
    _originIterator: UninitializedIterator): void {}

extern macro IsDoubleHole(HeapObject, intptr): bool;
extern macro StoreDoubleHole(HeapObject, intptr): void;

// Loads a float64_or_hole; the hole is encoded out-of-band (as a special
// bit pattern checked by IsDoubleHole), not as a distinct field.
macro LoadFloat64OrHole(r: &float64_or_hole): float64_or_hole {
  return float64_or_hole{
    is_hole: IsDoubleHole(
        %RawDownCast<HeapObject>(r.object), r.offset - kHeapObjectTag),
    value: *unsafe::NewReference<float64>(r.object, r.offset)
  };
}

macro StoreFloat64OrHole(r: &float64_or_hole, value: float64_or_hole): void {
  if (value.is_hole) {
    StoreDoubleHole(
        %RawDownCast<HeapObject>(r.object), r.offset - kHeapObjectTag);
  } else {
    *unsafe::NewReference<float64>(r.object, r.offset) = value.value;
  }
}

// Downcasts o to T, using either a map-constant comparison (single instance
// type with a known map), an instance-type equality check, or an unsigned
// range check when T covers a range of instance types.
macro DownCastForTorqueClass<T : type extends HeapObject>(o: HeapObject):
    T labels CastError {
  const map = o.map;
  const minInstanceType = %MinInstanceType<T>();
  const maxInstanceType = %MaxInstanceType<T>();
  if constexpr (minInstanceType == maxInstanceType) {
    if constexpr (%ClassHasMapConstant<T>()) {
      if (map != %GetClassMapConstant<T>()) goto CastError;
    } else {
      if (map.instance_type != minInstanceType) goto CastError;
    }
  } else {
    // The unsigned comparison checks both bounds of
    // [minInstanceType, maxInstanceType] at once.
    const diff: int32 = maxInstanceType - minInstanceType;
    const offset = Convert<int32>(Convert<uint16>(map.instance_type)) -
        Convert<int32>(
            Convert<uint16>(FromConstexpr<InstanceType>(minInstanceType)));
    if (Unsigned(offset) > Unsigned(diff)) goto CastError;
  }
  return %RawDownCast<T>(o);
}

extern macro StaticAssert(bool, constexpr string): void;

// This is for the implementation of the dot operator. In any context where
// the dot operator is available, the correct way to get the length of an
// indexed field x from object o is `(&o.x).length`.
intrinsic %IndexedFieldLength<T: type>(o: T, f: constexpr string): intptr;

// If field x is defined as optional, then &o.x returns a reference to the
// field or crashes the program (unreachable) if the field is not present.
// Usually that's the most convenient behavior, but in rare cases such as the
// implementation of the dot operator, we may instead need to get a Slice to
// the optional field, which is either length zero or one depending on whether
// the field is present. This intrinsic provides Slices for both indexed
// fields (equivalent to &o.x) and optional fields.
intrinsic %FieldSlice<T: type, TSlice: type>(
    o: T, f: constexpr string): TSlice;

extern macro GetPendingMessage(): TheHole|JSMessageObject;
extern macro SetPendingMessage(TheHole | JSMessageObject): void;

// This is implicitly performed at the beginning of Torque catch-blocks.
macro GetAndResetPendingMessage(): TheHole|JSMessageObject {
  const message = GetPendingMessage();
  SetPendingMessage(TheHole);
  return message;
}

}  // namespace torque_internal

// Indicates that an array-field should not be initialized.
// For safety reasons, this is only allowed for untagged types.
struct UninitializedIterator {}

// %RawDownCast should *never* be used anywhere in Torque code except for
// in Torque-based UnsafeCast operators preceeded by an appropriate
// type dcheck()
intrinsic %RawDownCast<To: type, From: type>(x: From): To;
intrinsic %RawConstexprCast<To: type, From: type>(f: From): To;

intrinsic %MinInstanceType<T: type>(): constexpr InstanceType;
intrinsic %MaxInstanceType<T: type>(): constexpr InstanceType;

intrinsic %ClassHasMapConstant<T: type>(): constexpr bool;
intrinsic %GetClassMapConstant<T: type>(): Map;

// Chains two iterators: yields everything from `first`, then everything
// from `second`.
struct IteratorSequence<T: type, FirstIterator: type, SecondIterator: type> {
  macro Empty(): bool {
    return this.first.Empty() && this.second.Empty();
  }

  macro Next(): T labels NoMore {
    return this.first.Next()
        otherwise return (this.second.Next() otherwise NoMore);
  }

  first: FirstIterator;
  second: SecondIterator;
}

macro IteratorSequence<T: type, FirstIterator: type, SecondIterator: type>(
    first: FirstIterator, second: SecondIterator):
    IteratorSequence<T, FirstIterator, SecondIterator> {
  return IteratorSequence<T, FirstIterator, SecondIterator>{first, second};
}