• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
# plugin typed_arrays
# Copyright (c) 2024 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
14
15include_relative 'common.irt'
16include_relative '../../../irtoc/scripts/common.irt'
17include_relative '../../../irtoc/scripts/string_helpers.irt'
18
module Constants
  # All values below are strings substituted into the generated code; they
  # resolve to per-architecture offsets/sizes via cross_values at codegen time.

  # escompat.ArrayBuffer class layout
  ARRAY_BUFFER_CLASS_SIZE = "cross_values::GetArrayBufferClassSize(GetArch())"
  ARRAY_BUFFER_BYTE_LENGTH_OFFSET =  "cross_values::GetArrayBufferByteLengthOffset(GetArch())"
  ARRAY_BUFFER_DATA_OFFSET = "cross_values::GetArrayBufferDataOffset(GetArch())"
  ARRAY_BUFFER_NATIVE_DATA_OFFSET = "cross_values::GetArrayBufferNativeDataOffset(GetArch())"
  ARRAY_BUFFER_MANAGED_DATA_OFFSET = "cross_values::GetArrayBufferManagedDataOffset(GetArch())"
  ARRAY_BUFFER_IS_RESIZABLE_OFFSET = "cross_values::GetArrayBufferIsResizableOffset(GetArch())"
  # Class size padded so a TLAB allocation can be rounded up to TLAB_ALIGNMENT
  ARRAY_BUFFER_CLASS_SIZE_WITH_ALIGNMENT = "cross_values::GetArrayBufferClassSize(GetArch()) + TLAB_ALIGNMENT - 1"

  # Unsigned TypedArray (Uint8/Uint16/Uint32/BigUint64) class layout
  TYPED_UNSIGNED_ARRAY_BYTES_PER_ELEMENT_OFFSET = "cross_values::GetTypedUnsignedArrayBytesPerElementOffset(GetArch())"
  TYPED_UNSIGNED_ARRAY_BYTE_OFFSET_OFFSET = "cross_values::GetTypedUnsignedArrayByteOffsetOffset(GetArch())"
  TYPED_UNSIGNED_ARRAY_BYTE_LENGTH_OFFSET = "cross_values::GetTypedUnsignedArrayByteLengthOffset(GetArch())"
  TYPED_UNSIGNED_ARRAY_LENGTH_INT_OFFSET = "cross_values::GetTypedUnsignedArrayLengthIntOffset(GetArch())"
  TYPED_UNSIGNED_ARRAY_CLASS_SIZE = "cross_values::GetTypedUnsignedArrayClassSize(GetArch())"
  TYPED_UNSIGNED_ARRAY_BUFFER_OFFSET = "cross_values::GetTypedUnsignedArrayBufferOffset(GetArch())"
  TYPED_UNSIGNED_ARRAY_CLASS_SIZE_WITH_ALIGNMENT = "cross_values::GetTypedUnsignedArrayClassSize(GetArch()) + TLAB_ALIGNMENT - 1"

  # Signed/float TypedArray (Int8/Int16/Int32/BigInt64/Float32/Float64) class layout
  TYPED_ARRAY_BYTES_PER_ELEMENT_OFFSET = "cross_values::GetTypedArrayBytesPerElementOffset(GetArch())"
  TYPED_ARRAY_BYTE_OFFSET_OFFSET = "cross_values::GetTypedArrayByteOffsetOffset(GetArch())"
  TYPED_ARRAY_BYTE_LENGTH_OFFSET = "cross_values::GetTypedArrayByteLengthOffset(GetArch())"
  TYPED_ARRAY_LENGTH_INT_OFFSET = "cross_values::GetTypedArrayLengthIntOffset(GetArch())"
  TYPED_ARRAY_CLASS_SIZE = "cross_values::GetTypedArrayClassSize(GetArch())"
  TYPED_ARRAY_BUFFER_OFFSET = "cross_values::GetTypedArrayBufferOffset(GetArch())"
  TYPED_ARRAY_CLASS_SIZE_WITH_ALIGNMENT = "cross_values::GetTypedArrayClassSize(GetArch()) + TLAB_ALIGNMENT - 1"
end
44
45
# Generates the FastPath entrypoint "<name>ArrayFillInternalFastPath" that
# fills elements [startPos, endPos) of a signed/float TypedArray with `val`.
#   name   - TypedArray kind, used in the generated function and bridge names
#   prefix - entrypoint-name prefix for the slow-path fallback
#   type   - irtoc type of `val` (for Float32/Float64 the caller passes the
#            same-width integer type, so the raw bit pattern is stored)
#   scale  - log2(bytes per element); converts element indices to byte offsets
def GenerateTypedArrayFillInternal(name, prefix, type, scale)
    function("#{name}ArrayFillInternalFastPath".to_sym,
            params: {arr: 'ref', val: type, startPos: 'i32', endPos: 'i32'},
            regmap: $full_regmap,
            regalloc_set: $panda_mask,
            mode: [:FastPath]) {

        # This fast path is not supported on arm32
        if Options.arch == :arm32
            Intrinsic(:UNREACHABLE).Terminator.void
            next
        end
        buffer := LoadI(arr).Imm(Constants::TYPED_ARRAY_BUFFER_OFFSET).ref
        bufferData := LoadI(buffer).Imm(Constants::ARRAY_BUFFER_DATA_OFFSET).ref
        # Null data (NOTE(review): presumably a detached/native buffer — confirm)
        # is handled by the slow path
        If(bufferData, 0).EQ.Unlikely {
          Goto(:SlowPathEntrypoint)
        }
        # The signed/float TypedArray layout stores its byte offset as f64
        byteOffsetF64 := LoadI(arr).Imm(Constants::TYPED_ARRAY_BYTE_OFFSET_OFFSET).f64
        byteOffset := Cast(byteOffsetF64).i32
        arrayDataOffset := AddI(byteOffset).Imm(Constants::ARRAY_DATA_OFFSET).i32
        # Byte offsets of the first element to write and one-past-the-last
        offset0 := Add(arrayDataOffset, ShlI(startPos).Imm(scale).i32).i32
        offsetEnd := Add(arrayDataOffset, ShlI(endPos).Imm(scale).i32).i32

      Label(:Loop)
        offset := Phi(offset0, offset1).u32
        If(offset, offsetEnd).GE {
            Goto(:End)
        }
        # `send(type)` selects the store width matching the element type
        Store(bufferData, offset, val).send(type)
        offset1 := AddI(offset).Imm(1 << scale).i32
        Goto(:Loop)

      Label(:End)
        ReturnVoid().void
      Label(:SlowPathEntrypoint)
        ep_offset = get_entrypoint_offset("#{prefix}_ARRAY_FILL_INTERNAL_USUAL")
        Intrinsic(:SLOW_PATH_ENTRY, arr, val, startPos, endPos).AddImm(ep_offset).MethodAsImm("#{name}ArrayFillInternalUsualBridge").Terminator.void
        Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
    }
end
85
# Instantiate the fill fast path for every signed/float element kind.
# Float32/Float64 pass the same-width integer type so the raw bits are stored.
[
  ['Int8',     'INT8',      'i8',  0],
  ['Int16',    'INT16',     'i16', 1],
  ['Int32',    'INT32',     'i32', 2],
  ['BigInt64', 'BIG_INT64', 'i64', 3],
  ['Float32',  'FLOAT32',   'i32', 2],
  ['Float64',  'FLOAT64',   'i64', 3]
].each do |kind, ep_prefix, val_type, shift|
  GenerateTypedArrayFillInternal(kind, ep_prefix, val_type, shift)
end
92
93def GenerateTypedUnsignedArrayFillInternal(name, prefix, inType, outType, scale)
94    function("#{name}ArrayFillInternalFastPath".to_sym,
95            params: {arr: 'ref', val: inType, startPos: 'i32', endPos: 'i32'},
96            regmap: $full_regmap,
97            regalloc_set: $panda_mask,
98            mode: [:FastPath]) {
99
100        if Options.arch == :arm32
101            Intrinsic(:UNREACHABLE).Terminator.void
102            next
103        end
104        buffer := LoadI(arr).Imm(Constants::TYPED_UNSIGNED_ARRAY_BUFFER_OFFSET).ref
105        bufferData := LoadI(buffer).Imm(Constants::ARRAY_BUFFER_DATA_OFFSET).ref
106        If(bufferData, 0).EQ.Unlikely {
107          Goto(:SlowPathEntrypoint)
108        }
109        byteOffset := LoadI(arr).Imm(Constants::TYPED_ARRAY_BYTE_OFFSET_OFFSET).i32
110        arrayDataOffset := AddI(byteOffset).Imm(Constants::ARRAY_DATA_OFFSET).i32
111        offset0 := Add(arrayDataOffset, ShlI(startPos).Imm(scale).i32).i32
112        offsetEnd := Add(arrayDataOffset, ShlI(endPos).Imm(scale).i32).i32
113
114      Label(:Loop)
115        offset := Phi(offset0, offset1).u32
116        If(offset, offsetEnd).GE {
117            Goto(:End)
118        }
119        Store(bufferData, offset, val).send(outType)
120        offset1 := AddI(offset).Imm(1 << scale).i32
121        Goto(:Loop)
122
123      Label(:End)
124        ReturnVoid().void
125      Label(:SlowPathEntrypoint)
126        ep_offset = get_entrypoint_offset("#{prefix}_ARRAY_FILL_INTERNAL_USUAL")
127        Intrinsic(:SLOW_PATH_ENTRY, arr, val, startPos, endPos).AddImm(ep_offset).MethodAsImm("#{name}ArrayFillInternalUsualBridge").Terminator.void
128        Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
129    }
130end
131
# Instantiate the fill fast path for every unsigned element kind.
# UInt8Clamped shares the U_INT8 slow-path entrypoint with UInt8.
[
  ['UInt8Clamped', 'U_INT8',      'i32', 'u8',  0],
  ['UInt8',        'U_INT8',      'i32', 'u8',  0],
  ['UInt16',       'U_INT16',     'i32', 'u16', 1],
  ['UInt32',       'U_INT32',     'i64', 'u32', 2],
  ['BigUInt64',    'BIG_U_INT64', 'i64', 'u64', 3]
].each do |kind, ep_prefix, in_type, out_type, shift|
  GenerateTypedUnsignedArrayFillInternal(kind, ep_prefix, in_type, out_type, shift)
end
137
# Try to allocate an object in the TLAB (thread-local allocation buffer).
# The macro result (_allocated_object) is a pointer to the new object; when the
# TLAB does not have enough free space it jumps to :SlowPathEntrypoint, which
# must be defined by the function expanding this macro.
#   klass      - class of the object being allocated
#   klass_size - base (fixed) size of the object, in bytes
#   data_size  - extra variable-length payload size in bytes (0 if none)
macro(:allocate_object_tlab) do |klass, klass_size, data_size|
    # Not supported on arm32
    if Options.arch == :arm32
      Intrinsic(:UNREACHABLE).Terminator.void
      ReturnVoid().void
      next
    end

    _data_size := Cast(data_size).word
    # Fixed-size case: round klass_size up to TLAB_ALIGNMENT
    _klass_size_align := AddI(Cast(klass_size).word).Imm("TLAB_ALIGNMENT - 1").word
    _size_0 := AndI(_klass_size_align).Imm(Constants::ALIGNMENT_MASK).word
    If(_data_size, Cast(0).word).NE.Unlikely {
        # Variable-size case: round (klass_size + data_size) up to DEFAULT_ALIGNMENT_IN_BYTES
        _size_1 := Add(Add(_data_size, Cast(klass_size).word).word, "DEFAULT_ALIGNMENT_IN_BYTES - 1").word
        _size_1 := And(_size_1, "(~(DEFAULT_ALIGNMENT_IN_BYTES - 1))").word
    }
    _size := Phi(_size_0, _size_1).word

    # Load pointer to the TLAB from TLS
    _tlab := LoadI(%tr).Imm(Constants::TLAB_OFFSET).ptr
    # Load pointer to the start address of free memory in the TLAB
    _start := LoadI(_tlab).Imm(Constants::TLAB_CUR_FREE_POSITION_OFFSET).ptr
    # Load pointer to the end address of free memory in the TLAB
    _end := LoadI(_tlab).Imm(Constants::TLAB_MEMORY_END_ADDR_OFFSET).ptr
    # Check if there is enough space; if not, take the caller's slow path
    If(Sub(_end, _start).word, _size).B.Unlikely {
      Goto(:SlowPathEntrypoint)
    }
    Intrinsic(:WRITE_TLAB_STATS_SAFE, _start, _size, Cast(-1).u64).void if defines.DEBUG
    if defines.__SANITIZE_ADDRESS__ || defines.__SANITIZE_THREAD__
      call_runtime_save_all(Constants::ANNOTATE_SANITIZERS_NO_BRIDGE, _start, _size).void
    end
    # Store class of the object
    StoreI(_start, klass).Imm(Constants::OBJECT_CLASS_OFFSET).ref
    # Update the TLAB state (volatile bump of the free-position pointer)
    StoreI(Add(_tlab, Constants::TLAB_CUR_FREE_POSITION_OFFSET).ptr, Add(_start, _size).ptr).Imm(0).Volatile.ptr
    # Macro result: pointer to the newly allocated object
    _allocated_object := _start
end
177
178
179def GenerateToReversed(type)
180  case type
181  when "Uint8"
182    prefix = "TYPED_UNSIGNED"
183  when "Uint16"
184    prefix = "TYPED_UNSIGNED"
185  when "Uint32"
186    prefix = "TYPED_UNSIGNED"
187  when "BigUint64"
188    prefix = "TYPED_UNSIGNED"
189  else
190    prefix = "TYPED"
191  end
192  case type
193    when "Int8"
194      suffix = "i8"
195    when "Int16"
196      suffix = "i16"
197    when "Int32"
198      suffix = "i32"
199    when "BigInt64"
200      suffix = "i64"
201    when "Float32"
202      suffix = "f32"
203    when "Float64"
204      suffix = "f64"
205    when "Uint8"
206      suffix = "u8"
207    when "Uint16"
208      suffix = "u16"
209    when "Uint32"
210      suffix = "u32"
211    when "BigUint64"
212      suffix = "u64"
213    else
214      raise "Unexpected type: #{type}"
215  end
216  case type
217    when "BigInt64"
218      macroType = "BIG_INT64"
219    when "BigUint64"
220      macroType = "BIG_UINT64"
221  else
222    macroType = type.upcase
223  end
224  function("#{type}ArrayToReversedTlab".to_sym,
225      params: {typed_array: 'ref'},
226      regmap: $full_regmap,
227      regalloc_set: $panda_mask,
228      mode: [:FastPath]) {
229
230    # not supported
231    if Options.arch == :arm32
232      Intrinsic(:UNREACHABLE).Terminator.void
233      next
234    end
235
236    # load TypedArray params
237    eval("byte_offset := Cast(LoadI(typed_array).Imm(Constants::#{prefix}_ARRAY_BYTE_OFFSET_OFFSET).f64).u32")
238    eval("elm_size := Cast(LoadI(typed_array).Imm(Constants::#{prefix}_ARRAY_BYTES_PER_ELEMENT_OFFSET).f64).u32")
239    eval("arr_len := Cast(LoadI(typed_array).Imm(Constants::#{prefix}_ARRAY_LENGTH_INT_OFFSET).i32).u32")
240    array_byte_len := Mul(arr_len, elm_size).u32
241
242    # new Typed Array
243    klass := LoadI(typed_array).Imm(Constants::OBJECT_CLASS_OFFSET).ref
244    eval("new_typed_array := allocate_object_tlab(klass, Constants::#{prefix}_ARRAY_CLASS_SIZE, 0)")
245    eval("copy_u8_chars(typed_array, new_typed_array, Constants::#{prefix}_ARRAY_CLASS_SIZE)")
246    eval("StoreI(new_typed_array, Cast(0).f64).Imm(Constants::#{prefix}_ARRAY_BYTE_OFFSET_OFFSET).f64")
247
248    # new ArrayBuffer
249    eval("array_buffer := LoadI(typed_array).Imm(Constants::#{prefix}_ARRAY_BUFFER_OFFSET).ref")
250    array_buffer_klass := LoadI(array_buffer).Imm(Constants::OBJECT_CLASS_OFFSET).ref
251    new_array_buffer := allocate_object_tlab(array_buffer_klass, Constants::ARRAY_BUFFER_CLASS_SIZE, 0)
252    eval("StoreI(new_typed_array, new_array_buffer).Imm(Constants::#{prefix}_ARRAY_BUFFER_OFFSET).ptr")
253
254    # new Array
255    array := LoadI(array_buffer).Imm(Constants::ARRAY_BUFFER_DATA_OFFSET).ref
256    array_klass := LoadI(array).Imm(Constants::OBJECT_CLASS_OFFSET).ref
257    new_packet := allocate_object_tlab(array_klass, Constants::ARRAY_CLASS_SIZE, array_byte_len)
258    StoreI(new_packet, array_byte_len).Imm(Constants::ARRAY_LENGTH_OFFSET).u32
259    src_data := Add(AddI(array).Imm(Constants::ARRAY_DATA_OFFSET).ptr, byte_offset).ptr
260    dst_data := AddI(new_packet).Imm(Constants::ARRAY_DATA_OFFSET).ptr
261    copy_u8_chars(src_data, dst_data, array_byte_len)
262
263    StoreI(new_array_buffer, new_packet).Imm(Constants::ARRAY_BUFFER_DATA_OFFSET).ptr
264    StoreI(new_array_buffer, dst_data).Imm(Constants::ARRAY_BUFFER_NATIVE_DATA_OFFSET).ptr
265    StoreI(new_array_buffer, array_byte_len).Imm(Constants::ARRAY_BUFFER_BYTE_LENGTH_OFFSET).u32
266    StoreI(new_array_buffer, Cast(0).u8).Imm(Constants::ARRAY_BUFFER_IS_RESIZABLE_OFFSET).u8
267
268    # fill new buffer in reversed order
269    src_ofs_0 := Sub(array_byte_len, elm_size).u32
270    dst_ofs_0 := 0
271  Label(:CopyLoop)
272    src_ofs := Phi(src_ofs_0, src_ofs_1).u32
273    dst_ofs := Phi(dst_ofs_0, dst_ofs_1).u32
274
275    If(dst_ofs, array_byte_len).EQ.Unlikely {
276        Goto(:End)
277    }
278    eval("Store(dst_data, dst_ofs, Load(src_data, src_ofs).#{suffix}).#{suffix}")
279    src_ofs_1 := Sub(src_ofs, elm_size).u32
280    dst_ofs_1 := Add(dst_ofs, elm_size).u32
281    Goto(:CopyLoop)
282
283  Label(:End)
284    Return(new_typed_array).ptr
285
286  Label(:SlowPathEntrypoint)
287    ep_offset = get_entrypoint_offset("#{macroType}_ARRAY_TO_REVERSED_SLOW_PATH")
288    Intrinsic(:SLOW_PATH_ENTRY, typed_array).AddImm(ep_offset).MethodAsImm("#{type}ArrayToReversedOddSavedBridge").Terminator.ptr
289    Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
290  }
291end
292
# Instantiate the toReversed fast path for every TypedArray element kind.
%w[Int8 Int16 Int32 BigInt64 Float32 Float64
   Uint8 Uint16 Uint32 BigUint64].each do |kind|
  GenerateToReversed(kind)
end
303