• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env ruby
2
3# Copyright (c) 2021-2022 Huawei Device Co., Ltd.
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16include_relative 'common.irt'
17
# Per-architecture map of interpreter-reserved machine registers: each
# symbolic role (dispatch table, bytecode pc, frame pointer, accumulator,
# acc tag, mirror offset, method pointer) is pinned to a fixed register
# number on the given target.
fixed_regmap = Regmap.new({
  arm32: { dispatch: 12, pc: 4, frame: 8, acc: 7 },
  arm64: { dispatch: 24, pc: 20, frame: 23, acc: 21, acc_tag: 22, moffset: 25, method_ptr: 26 },
  x86_64: { dispatch: 8, pc: 4, frame: 5, acc: 11, acc_tag: 3 },
})
# Register map visible to bytecode handlers: the common map extended with
# the fixed assignments above.
handler_regmap = $full_regmap + fixed_regmap
24
# Verifies that the register numbers used by +lhs+ do not collide with
# those of +rhs+; raises a RuntimeError naming the +rhs+ set otherwise.
def check_regmap(lhs, rhs, name)
  overlap = lhs.data.values & rhs.data.values
  return if overlap.empty?

  raise "Fixed register numbers should not intersect with '#{name}' registers"
end
29
# Only checked on arm64 — the only target with enough spare registers for
# the full fixed assignment above.
if Options.arm64?  # other archs have no enough regs
  # fixed registers assignment sanity checks:
  check_regmap(fixed_regmap, $panda_regmap, 'panda')
  check_regmap(fixed_regmap, $arch_regmap, 'arch')
  check_regmap(fixed_regmap, $args_regmap, 'args')
  check_regmap(fixed_regmap, $callers_regmap, 'caller')
end

# Limits validated for the generated interpreter code.
InterpreterValidation = {
  spills_count_max: 32  # should be synced with SPILL_SLOTS in codegen_interpreter.h
}
41
# Macros:

# Casts:

# Zero-extending casts: u8/u16 -> u32/u64.
%w[8 16].each do |from|
  %w[u32 u64].each do |to|
    macro(:"u#{from}to#{to}") do |arg|
      Cast(arg).SrcType("DataType::UINT#{from}").send(to)
    end
  end
end

# Sign-extending casts: i8/i16 -> i32.
%w[8 16].each do |from|
  macro(:"i#{from}toi32") do |arg|
    Cast(arg).SrcType("DataType::INT#{from}").send(:i32)
  end
end

# Sign-extending casts: i8/i16 -> i64.
%w[8 16].each do |from|
  macro(:"i#{from}toi64") do |arg|
    Cast(arg).SrcType("DataType::INT#{from}").send(:i64)
  end
end
65
66
# Casts out of 32-bit integers (truncation, re-signing and widening).
[%w[u32 UINT32], %w[i32 INT32]].each do |from, from_type|
  %w[b i8 u8 i16 u16 i64 u64].each do |to|
    macro(:"#{from}to#{to}") do |arg|
      Cast(arg).SrcType("DataType::#{from_type}").send(to)
    end
  end
end

# Casts out of u64.
%w[b u32 i32 u8 i8 i16 u16 i64].each do |to|
  macro(:"u64to#{to}") do |arg|
    Cast(arg).SrcType("DataType::UINT64").send(to)
  end
end

# Casts out of i64.
%w[b i32].each do |to|
  macro(:"i64to#{to}") do |arg|
    Cast(arg).SrcType("DataType::INT64").send(to)
  end
end

# bool -> u32.
macro(:btou32) do |arg|
  Cast(arg).SrcType("DataType::BOOL").u32
end
90
# Integer -> floating-point casts.
[%w[u32 UINT32], %w[i32 INT32], %w[u64 UINT64], %w[i64 INT64]].each do |from, from_type|
  %w[f32 f64].each do |to|
    macro(:"#{from}to#{to}") do |arg|
      Cast(arg).SrcType("DataType::#{from_type}").send(to)
    end
  end
end

# Casts out of f32.
%w[f64 i32 u32 i64 u64].each do |to|
  macro(:"f32to#{to}") do |arg|
    Cast(arg).SrcType("DataType::FLOAT32").send(to)
  end
end

# Casts out of f64.
%w[i32 u32 i64 u64 f32].each do |to|
  macro(:"f64to#{to}") do |arg|
    Cast(arg).SrcType("DataType::FLOAT64").send(to)
  end
end
110
# Normalize any integer to 0/1: yields 1 (i32) when the argument is
# non-zero, 0 otherwise.
['i32', 'i64', 'u32', 'u64'].each do |from|
  macro(:"#{from}tou1") do |arg|
    res0 := 0
    If(arg, 0).NE do
      res1 := 1
    end
    Phi(res0, res1).i32
  end
end
120
# u8/u16 -> machine word zero-extension; the word width depends on the
# target architecture.
%w[u8 u16].each do |from|
  macro(:"#{from}toword") do |arg|
    widen = Options.arch_64_bits? ? :"#{from}tou64" : :"#{from}tou32"
    send(widen, arg)
  end
end

# u32 -> machine word; a no-op on 32-bit targets.
macro(:u32toword) do |arg|
  Options.arch_64_bits? ? u32tou64(arg) : arg
end
138
# Reinterpret an integer value as a managed reference; the NO_HOIST flag
# keeps the compiler from moving the bitcast above preceding guards.
macro(:bitcast_to_ref) do |value|
  set_no_hoist_flag(Bitcast(value).SrcType(Constants::REF_UINT).ref)
end

# to be redefined in plugins
# Box an i32 into a dynamic 'any' value by tagging it with TAG_INT.
macro(:i32toany) do |arg|
  OrI(i32toi64(arg)).Imm("panda::coretypes::TaggedValue::TAG_INT").i64
end

# Box an f64 into a dynamic 'any' value.
macro(:f64toany) do |arg|
  CastValueToAnyType(arg).AnyType(Constants::DYN_UNDEFINED).any
end
151
# Decoding

# Load one byte of bytecode at pc + offset.
macro(:readbyte) do |pc, offset|
  LoadI(pc).Imm(offset).u8
end

# Extract the low nibble of the byte at pc + offset.
macro(:read_lower_4bits) do |offset|
  if Options.arm64?
    # arm64 path works in 32-bit registers before masking.
    imm := readbyte(pc, offset).u32
    AndI(imm).Imm(0xf).u8
  else
    imm := readbyte(pc, offset).u8
    AndI(imm).Imm(0xf).u8
  end
end

# Extract the high nibble of the byte at pc + offset, sign-extended:
# arm64 shifts the nibble to the top of a 32-bit register and arithmetic-
# shifts it back; other targets use a plain i8 arithmetic shift.
macro(:signed_read_higher_4bits) do |offset|
  if Options.arm64?
    imm := readbyte(pc, offset).i32
    shl_imm := ShlI(imm).Imm(24).i32
    i32toi8(AShrI(shl_imm).Imm(28).i32)
  else
    imm := readbyte(pc, offset).i8
    AShrI(imm).Imm(4).i8
  end
end

# Extract the high nibble of the byte at pc + offset, zero-extended.
macro(:read_higher_4bits) do |offset|
  if Options.arm64?
    imm := readbyte(pc, offset).u32
    shl_imm := ShlI(imm).Imm(24).u32
    u32tou8(ShrI(shl_imm).Imm(28).u32)
  else
    imm := readbyte(pc, offset).u8
    ShrI(imm).Imm(4).u8
  end
end
189
# Decode a register operand into a vreg index (machine word), honoring
# 4/8/16-bit operand encodings; 4-bit operands may sit in either nibble.
macro(:as_vreg_idx) do |operand|
  raise 'Register is expected' unless operand.reg?

  offset = operand.offset / 8  # bit offset -> byte offset
  case operand.width
  when 4
    u8toword(operand.offset % 8 != 0 ? read_higher_4bits(offset) : read_lower_4bits(offset))
  when 8
    u8toword(readbyte(pc, offset))
  when 16
    u16toword(readbyte(pc, offset).u16)
  end
end

# Decode an id operand (16 or 32 bits wide).
macro(:as_id) do |operand|
  raise 'ID is expected' unless operand.id?

  offset = operand.offset / 8

  case operand.width
  when 16
    readbyte(pc, offset).u16
  when 32
    readbyte(pc, offset).u32
  end
end

# Decode an immediate operand; 4-bit immediates are nibble-packed and
# sign-extended, 32/64-bit ones may be float-typed per the operand type.
macro(:as_imm) do |operand|
  raise 'Immediate is expected' unless operand.imm?

  offset = operand.offset / 8

  case operand.width
  when 4
    operand.offset % 8 != 0 ? signed_read_higher_4bits(offset) : read_lower_4bits(offset)
  when 8
    readbyte(pc, offset).i8
  when 16
    readbyte(pc, offset).i16
  when 32
    if operand.type == 'f32'
      readbyte(pc,offset).f32
    else
      readbyte(pc, offset).i32
    end
  when 64
    if operand.type == 'f64'
      readbyte(pc, offset).f64
    else
      readbyte(pc, offset).i64
    end
  end
end
243
# Current bytecode offset: pc minus the method's instructions base.
macro(:ins_offset) do
  instructions_offset := LoadI(%frame).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
  Sub(%pc, instructions_offset).word
end

# Persist the current bytecode offset into the frame.
macro(:update_bytecode_offset) do
  StoreI(%frame, ins_offset).Imm(Constants::FRAME_BYTECODE_OFFSET).u32
end
252
# Register access:

# Address of virtual register vreg_idx inside the given frame.
macro(:frame_vreg_ptr) do |frame, vreg_idx|
  vreg_offset := AddI(Mul(vreg_idx, Constants::VREGISTER_SIZE).word).Imm(Constants::VREGISTERS_OFFSET).word
  Add(frame, vreg_offset).ptr
end

# Address of the vreg named by a decoded register operand, in %frame.
macro(:vreg_ptr) do |operand|
  vreg_idx := as_vreg_idx(operand)
  frame_vreg_ptr(%frame, vreg_idx)
end

# Read a vreg's payload (result type is applied by the caller).
macro(:get_value) do |vreg_ptr|
  LoadI(vreg_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET)
end

# Write a vreg's payload; raw Ruby integers/strings are stored as u64,
# IR values with their own type.
macro(:set_value) do |vreg_ptr, val|
  StoreI(vreg_ptr, val).Imm(Constants::VREGISTER_VALUE_OFFSET).send(val.is_a?(Integer) || val.is_a?(String) ? :u64 : val.type)
end

# Byte distance from a vreg to its tag mirror, computed from the frame's
# vreg count.
macro(:get_moffset_frame) do |frame|
  vreg_num := LoadI(frame).Imm(Constants::VREGISTERS_NUM_OFFSET).u32
  Mul(u32toword(vreg_num), Constants::VREGISTER_SIZE).word
end
277
# Mirror offset of the current frame; arm64 keeps it in a fixed register.
macro(:get_moffset) do
  Options.arm64? ? %moffset : get_moffset_frame(%frame)
end

# Method pointer stored in the given frame.
macro(:get_method_ptr_frame) do |frame|
  LoadI(frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
end

# Current method pointer; arm64 keeps it in a fixed register.
macro(:get_method_ptr) do
  Options.arm64? ? %method_ptr : get_method_ptr_frame(%frame)
end

# Read the tag mirror of a vreg (mirror lives moffset bytes past it).
macro(:get_tag) do |vreg_ptr|
  vreg_mirror_ptr := Add(vreg_ptr, get_moffset()).ptr
  LoadI(vreg_mirror_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
end

# Write the tag mirror of a vreg with an explicit mirror offset.
# NOTE(review): the 'frame' parameter is currently unused here.
macro(:set_tag_frame) do |frame, vreg_ptr, tag, moffset|
  vreg_mirror_ptr := Add(vreg_ptr, moffset).ptr
  StoreI(vreg_mirror_ptr, tag).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
end

# Write the tag mirror of a vreg in the current frame.
macro(:set_tag) do |vreg_ptr, tag|
  set_tag_frame(%frame, vreg_ptr, tag, get_moffset())
end

# Payload of the vreg named by a decoded operand.
macro(:vreg_value) do |operand|
  get_value(vreg_ptr(operand))
end
307
# Store a primitive value into a vreg: tag first, then payload.
macro(:set_primitive) do |v, value|
  set_tag(v, Constants::PRIMITIVE_TAG)
  set_value(v, value)
end

# Store an object reference into a vreg: tag first, then payload.
macro(:set_object) do |v, value|
  set_tag(v, Constants::OBJECT_TAG)
  set_value(v, value)
end

# Copy a source vreg (payload + tag) into slot dst_idx of new_frame,
# whose mirror area starts at new_moffset.
macro(:copy_reg) do |new_frame, dst_idx, src_operand, new_moffset|
  dst_reg_ptr = frame_vreg_ptr(new_frame, dst_idx)
  src_reg_ptr = vreg_ptr(src_operand)
  set_value(dst_reg_ptr, get_value(src_reg_ptr).i64)
  set_tag_frame(new_frame, dst_reg_ptr, get_tag(src_reg_ptr), new_moffset)
end
324
# Accumulator access:

# Address of the accumulator slot inside the given frame.
macro(:acc_ptr_frame) do |frame|
  AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
end

# Address of the accumulator slot of the current frame.
macro(:acc_ptr) do
  acc_ptr_frame(%frame)
end

# Non-zero when the tag word marks an object reference.
macro(:has_object) do |tag|
  AndI(tag).Imm("coretypes::TaggedValue::OBJECT_MASK").u64
end

# Spill the accumulator tag into the frame's acc mirror slot.
macro(:save_acc_tag) do |tag|
  StoreI(acc_ptr, tag).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
end

# Spill the accumulator payload into the frame.
macro(:save_acc_value) do |value|
  StoreI(%frame, value).Imm(Constants::GET_ACC_OFFSET).send(value.type)
end

# Spill the live acc registers (value + tag) into the frame.
macro(:save_acc) do
  save_acc_var(%acc, %acc_tag)
end

# Spill an explicit acc value/tag pair into the frame.
macro(:save_acc_var) do |acc_var, acc_tag_var|
  save_acc_tag(acc_tag_var)
  save_acc_value(acc_var)
end

# Reload the accumulator payload from the frame (type applied by caller).
macro(:restore_acc) do
  LoadI(%frame).Imm(Constants::GET_ACC_OFFSET)
end

# Reload the accumulator tag from the frame's acc mirror slot.
macro(:restore_acc_tag) do
  LoadI(acc_ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
end
363
# Put a value/tag pair into the live accumulator registers.
macro(:load_to_acc_reg) do |acc_value, acc_tag_value|
  acc_tag := acc_tag_value
  acc := acc_value
end

# Load a primitive into the accumulator.
macro(:set_acc_primitive) do |value|
  load_to_acc_reg(value, Constants::PRIMITIVE_TAG)
end

# Load an object reference into the accumulator.
macro(:set_acc_object) do |value|
  load_to_acc_reg(value, Constants::OBJECT_TAG)
end

# Copy the live accumulator (tag + payload) into the slot at dst_ptr.
macro(:copy_acc) do |dst_ptr|
  StoreI(dst_ptr, acc_tag).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
  StoreI(dst_ptr, acc).Imm(0).send(acc.type)
end

# Copy the live accumulator into a vreg of new_frame via its tag mirror.
macro(:copy_acc_to_reg) do |new_frame, dst_ptr, new_moffset = nil|
  set_tag_frame(new_frame, dst_ptr, acc_tag, new_moffset)
  set_value(dst_ptr, acc)
end

# Debug traps: unreachable when the object-mask test of the argument
# compares equal (eq) / not-equal (ne) to zero.
# NOTE(review): the parameter is named vreg_ptr but is passed straight to
# has_object, which masks it as a tag word — confirm callers pass a tag.
["eq", "ne"].each do |cc|
  macro(:"assert_has_object_#{cc}") do |vreg_ptr|
    If(has_object(vreg_ptr), 0).send(:"#{cc.upcase}").Unlikely {
      Intrinsic(:UNREACHABLE).Terminator.void
    }
  end
end
394
# Helper macros:

# Jump to the next handler; in debug builds mark the fall-through dead.
macro(:tail_call) do |addr|
  Intrinsic(:TAIL_CALL, addr).Terminator.void
  Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
end

# Fetch the opcode at pc, look its handler up in the dispatch table and
# tail-call it, keeping pc/table in their fixed registers.
macro(:dispatch) do |table, pc|
  opc := readbyte(pc, 0)
  offset := Mul(u8toword(opc), "WordSize()").word
  addr := Load(table, offset).ptr
  LiveOut(pc).DstReg(regmap[:pc]).ptr
  LiveOut(table).DstReg(regmap[:dispatch]).ptr
  tail_call(addr)
end

# Call a runtime entrypoint by symbol name.
macro(:call_runtime) do |sym, *args|
  Call(*args).Method(sym)
end

# Advance pc by a compile-time constant number of bytes.
macro(:advance_pc_imm) do |pc, imm|
  AddI(pc).Imm(imm).ptr
end

# Advance pc by a run-time value.
macro(:advance_pc_var) do |pc, var|
  Add(pc, var).ptr
end
422
# Receiver selection for acc-based call forms: the accumulator when
# imm == 0, otherwise the given vreg.
macro(:acc_receiver) do |op, imm|
  If(imm, 0).EQ {
    res1 := acc.ptr
  } Else {
    res2 := vreg_value(op).ptr
  }
  Phi(res1, res2).ptr
end

# Receiver of a call: chosen via acc_receiver only when an imm operand
# is present (a Ruby-time decision).
macro(:get_receiver) do |v, imm|
  if imm
    acc_receiver(v, imm)
  else
    vreg_value(v).ptr
  end
end

# Read a ULEB128 value: single-byte encodings (< 0x80) are decoded
# inline, multi-byte ones go through the runtime.
macro(:read_uleb) do |ptr|
  fast_uleb := u8tou32(LoadI(ptr).Imm(0).u8)
  If(fast_uleb, 0x80).GE.Unlikely {
    slow_uleb := call_runtime("ReadUlebEntrypoint", ptr).u32
  }
  Phi(fast_uleb, slow_uleb).u32
end
447
# Pointer to the method's code data inside its panda file (file base plus
# the method's code id).
macro(:method_file_data) do |method_ptr|
  panda_file := LoadI(method_ptr).Imm(Constants::METHOD_PANDA_FILE_OFFSET).ptr
  code_id := LoadI(method_ptr).Imm(Constants::METHOD_CODE_ID_OFFSET).u32
  base := LoadI(panda_file).Imm(0).ptr
  method_data_ptr := Add(base, u32toword(code_id)).ptr
end

# Interpreter-cache entry for the current pc: the per-thread cache is
# indexed by (pc >> 2) masked to the cache size.
macro(:get_cache_entry_ptr) do
  cache := AddI(%tr).Imm(Constants::THREAD_INTERPRETER_CACHE_OFFSET).ptr
  idx := AndI(ShrI(Bitcast(%pc).SrcType("DataType::POINTER").word).Imm(2).word).Imm("InterpreterCache::N - 1").word
  Add(cache, Mul(idx, "sizeof(InterpreterCache::Entry)").word).ptr
end
460
# Resolve an entity by bytecode id through the per-thread interpreter
# cache: fast path when the cached entry matches both pc and the current
# method, otherwise call the named runtime slow path (optionally saving/
# restoring the accumulator around it).  Yields the result typed 'type'.
# NOTE(review): the enable_slowpath parameter is currently unused.
macro(:cache_entry) do |id, need_restore, need_save, type, slow_path_name, enable_slowpath = true|
  entry_ptr := get_cache_entry_ptr()
  entry_pc := LoadI(entry_ptr).Imm(0).ptr
  entry_caller := LoadI(entry_ptr).Imm("sizeof(void*)").ptr
  method_ptr := get_method_ptr()
  If(entry_pc, %pc).EQ.Likely {
    If(entry_caller, method_ptr).EQ.Likely {
      # Cache hit: cached payload follows the two pointer keys.
      fast := LoadI(entry_ptr).Imm("2*sizeof(void*)").send(type)
    } Else {
      Goto(:Slow)
    }
  } Else {
    Label(:Slow)
    if slow_path_name
      if need_save
        save_acc_var(acc, acc_tag)
      end
      slow := call_runtime(slow_path_name, %tr, method_ptr, u16tou32(id), entry_ptr, %pc).send(type)
      if need_restore
        acc_restored := restore_acc().send(acc.type)
      end
    else
      slow := nullptr
    end
  }
  Phi(fast, slow).send(type)
end

# Field offset by id (no acc traffic on the slow path).
macro(:field_offset) do |id|
  cache_entry(id, false, false, :ptr, "GetFieldByIdEntrypoint")
end

# Static field pointer by id.
macro(:static_field) do |id, need_restore = true, need_save = true|
  cache_entry(id, need_restore, need_save, :ptr, "GetStaticFieldByIdEntrypoint")
end

# Callee Method* by id.
macro(:callee_ptr) do |id, need_save|
  cache_entry(id, true, need_save, :ptr, "GetCalleeMethodFromBytecodeId")
end

# Class pointer by id.
macro(:type_ptr) do |id, need_restore = false, need_save = true|
  cache_entry(id, need_restore, need_save, :ptr, "ResolveTypeByIdEntrypoint")
end
504
# Locate the catch block for the pending exception; yields the handler pc
# and defines the handler frame state (frame_eh, acc_eh, acc_tag_eh and,
# on arm64, moffset_eh/method_ptr_eh).  Leaves the interpreter entirely
# when no handler is found (runtime returns the unchanged pc).
macro(:find_catch_block) do
  handler_pc := call_runtime("FindCatchBlockInIFrames", %tr, %frame, pc).ptr
  If(handler_pc, pc).EQ.Unlikely {
    Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
  }
  frame_eh := LoadI(%tr).Imm(Constants::THREAD_FRAME_OFFSET).ptr
  if Options.arm64?
    moffset_eh := get_moffset_frame(frame_eh)
    method_ptr_eh := get_method_ptr_frame(frame_eh)
  end
  acc_ptr := acc_ptr_frame(frame_eh).ptr
  acc_eh := LoadI(acc_ptr).Imm(0).u64
  acc_tag_eh := LoadI(AddI(acc_ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).ptr).u64
  handler_pc
end

# Transfer control to the exception handler stub (the extra slot after
# the last regular handler in the dispatch table), pinning the
# interpreter state into its fixed registers.
macro(:move_to_exception) do
  LiveOut(table).DstReg(regmap[:dispatch]).ptr
  LiveOut(frame).DstReg(regmap[:frame]).ptr
  if Options.arm64?
    LiveOut(moffset).DstReg(regmap[:moffset]).word
    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
  end
  LiveOut(tr).DstReg(regmap[:tr]).ptr
  LiveOut(pc).DstReg(regmap[:pc]).ptr
  addr := Load(table, Panda::dispatch_table.handler_names.size * 8).ptr
  tail_call(addr)
end

# Forbid hoisting of the given instruction.
macro(:set_no_hoist_flag) do |inst|
  inst.SetFlag("compiler::inst_flags::NO_HOIST")
end
537
# Leave the current handler: pin acc/frame/thread state into the fixed
# registers and dispatch the instruction at pc.
macro(:move_to_exit) do |pc, acc, acc_tag|
  LiveOut(acc).DstReg(regmap[:acc]).ptr
  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr
  LiveOut(frame).DstReg(regmap[:frame]).ptr
  if Options.arm64?
    LiveOut(moffset).DstReg(regmap[:moffset]).word
    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
  end
  LiveOut(tr).DstReg(regmap[:tr]).ptr

  dispatch(table, pc)
end

# Pending exception object of the current thread (null when none).
macro(:exception_val) do
  LoadI(%tr).Imm(Constants::THREAD_EXCEPTION_OFFSET).ptr
end
554
# make a stop at Safepoint if the flag is set
# Defines acc_ — the (possibly reloaded) accumulator after the safepoint.
macro(:safepoint) do |acc_type, is_save_acc|
  flags := LoadI(%tr).Imm(Constants::THREAD_FLAG_OFFSET).u16
  If(flags, 0).NE.Unlikely {
    save_acc_var(acc, acc_tag) if is_save_acc
    call_runtime("SafepointEntrypointInterp", %tr).void
    restored_acc := restore_acc().send(acc_type)
  }
  acc_ := Phi(acc, restored_acc).send(acc_type)
end
565
566macro(:verify) do |method, is_initobj|
567  if is_initobj
568    method_flags := LoadI(method).Imm(Constants::METHOD_ACCESS_FLAGS_OFFSET).Volatile(true).u32
569  end
570  intrinsic_bit := ShrI(AndI(method_flags).Imm("ACC_INTRINSIC").u32).Imm(Constants::INTRINSIC_MASK_SHIFT).u32
571  verif_status := ShrI(AndI(method_flags).Imm("VERIFICATION_STATUS_MASK").u32).Imm("VERIFICATION_STATUS_SHIFT").u32
572  If(Or(verif_status, intrinsic_bit).u32, Constants::VERIFIED_OK).LT.Unlikely {
573    If(call_runtime("Verify", method).b, 0).EQ.Unlikely {
574      move_to_exception
575    }
576  }
577end
578
# Verify the callee and decay its hotness counter; hand the method to the
# compiler once the counter drops to zero or below.
macro(:update_hotness_counter) do |callee, is_initobj|
  verify(callee, is_initobj)

  hc := LoadI(callee).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16;
  If(hc, 0).LE.Unlikely {
    call_runtime("CallCompilerSlowPath", %tr, callee).void;
  } Else {
    StoreI(callee, SubI(hc).Imm(1).i16).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16
  }
end

# Record a taken branch in the method's profiling data, if any.
macro(:update_branch_taken) do |method_ptr|
  prof_data := LoadI(method_ptr).Volatile(true).Imm(Constants::METHOD_NATIVE_POINTER_OFFSET).ptr
  #TODO(mshimenkov): place likely/unlikely
  If(prof_data, 0).NE {
    call_runtime("UpdateBranchTaken", method_ptr, %frame, %pc, prof_data).void
  }
end

# Record an untaken branch in the method's profiling data, if any.
macro(:update_branch_untaken) do |method_ptr|
  prof_data := LoadI(method_ptr).Volatile(true).Imm(Constants::METHOD_NATIVE_POINTER_OFFSET).ptr
  #TODO(mshimenkov): place likely/unlikely
  If(prof_data, 0).NE {
    call_runtime("UpdateBranchUntaken", method_ptr, %frame, %pc, prof_data).void
  }
end
605
# Branch bookkeeping: on backward branches (imm <= 0) take a safepoint,
# decay the hotness counter and possibly enter OSR-compiled code.
# Yields the next pc; acc_sf/acc_tag_sf/frame_sf (plus moffset_sf/
# method_ptr_sf on arm64) carry the possibly-updated interpreter state.
macro(:instrument_branches) do |imm, acc_type, method_ptr|
  inc_pc := advance_pc_var(pc, i32tou64(imm))
  If(imm, 0).LE {
    safepoint(acc_type, true)

    hc := LoadI(method_ptr).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16;

    If(hc, 0).LE.Unlikely {
      osr_success := call_runtime("CallCompilerSlowPathOSR", %tr, method_ptr, %frame, acc_, acc_tag, ins_offset, imm).i32
      IfImm(osr_success).Imm(0).NE.Unlikely {
        # handle_fake_return is defined elsewhere in the file; presumably
        # it supplies the fake_* values merged by the Phis below — confirm.
        handle_fake_return()
      }
    } Else {
      StoreI(method_ptr, SubI(hc).Imm(1).i16).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16
    }

    tmp_acc := Phi(acc_, fake_acc, acc_).send(acc_type)
    tmp_acc_tag := Phi(acc_tag, fake_acc_tag, acc_tag).i64
    tmp_frame := Phi(%frame, fake_frame, %frame).ptr
    if Options.arm64?
      tmp_moffset := Phi(%moffset, fake_moffset, %moffset).word
      tmp_method_ptr := Phi(%method_ptr, fake_method_ptr, %method_ptr).ptr
    end
    tmp_pc := Phi(inc_pc, fake_pc, inc_pc).ptr
  }

  acc_sf := Phi(acc, tmp_acc).send(acc_type)
  acc_tag_sf := Phi(acc_tag, tmp_acc_tag).i64
  frame_sf := Phi(%frame, tmp_frame).ptr
  if Options.arm64?
    moffset_sf := Phi(%moffset, tmp_moffset).word
    method_ptr_sf := Phi(%method_ptr, tmp_method_ptr).ptr
  end
  Phi(inc_pc, tmp_pc).ptr
end
641
# initobj: create an object for a constructor call by id — handling the
# multi-dimensional-array and string special cases inline — then invoke
# the constructor via generic_call.  'callee' may be null, in which case
# the Method* is resolved through the runtime.
macro(:initobj_call) do |id, size, callee, nargs, copy_lambda, op_format, first_vreg|
  If(callee, 0).EQ.Unlikely {
    klass_1 := call_runtime("GetMethodClassById", get_method_ptr(), id).ref
    acc_ := nullptr
    acc_tag_ := Constants::OBJECT_TAG
    If(klass_1, 0).EQ.Unlikely {
      move_to_exception
    }
  } Else {
    klass_2 := LoadI(callee).Imm(Constants::METHOD_CLASS_OFFSET).ref
  }
  acc := Phi(acc_, acc).send(acc.type)
  acc_tag := Phi(acc_tag_, acc_tag).i64
  klass := Phi(klass_1, klass_2).ref
  save_acc_var(acc, acc_tag)

  # A non-null component type means a multi-dimensional array ctor.
  component_type := LoadI(klass).Imm(Constants::CLASS_COMPONENT_OFFSET).ref
  If(component_type, 0).NE.Unlikely {
    array := call_runtime("CreateMultiDimensionalArrayById", %tr, %frame, klass, get_method_ptr(), id, pc, op_format).ptr
    If(array, 0).EQ.Unlikely {
      LiveOut(array).DstReg(regmap[:acc]).ptr
      LiveOut(Constants::OBJECT_TAG).DstReg(regmap[:acc_tag]).ptr
      move_to_exception
    }
    move_to_exit(advance_pc_imm(%pc, size), array, Constants::OBJECT_TAG)
  }
  If(callee, 0).EQ.Unlikely {
    entry_ptr := get_cache_entry_ptr()
    callee_0 := call_runtime("GetCalleeMethodFromBytecodeId", %tr, get_method_ptr(), u16tou32(id), entry_ptr, %pc).ptr
  }
  callee_ := Phi(callee, callee_0).ptr
  If(callee_, 0).EQ.Unlikely {
    move_to_exception
  }

  # Strings get a dedicated fast creation path.
  class_flags := LoadI(klass).Imm(Constants::BASE_CLASS_FLAGS_OFFSET).u32
  If(AndI(class_flags).Imm("panda::Class::STRING_CLASS").u32, 0).NE.Unlikely {
    ctor_arg := vreg_value(first_vreg).ref
    str := call_runtime("VmCreateString", %tr, callee_, ctor_arg).ptr
    If(str, 0).EQ.Unlikely {
      LiveOut(str).DstReg(regmap[:acc]).ptr
      LiveOut(Constants::OBJECT_TAG).DstReg(regmap[:acc_tag]).ptr
      move_to_exception
    }
    move_to_exit(advance_pc_imm(%pc, size), str, Constants::OBJECT_TAG)
  }

  # Plain object: allocate, put into acc and call the constructor.
  obj := call_runtime("CreateObjectByClassInterpreter", %tr, klass).ptr
  If(obj, 0).EQ.Unlikely {
    move_to_exception
  }
  # no restore as acc is dead now
  acc := obj
  acc_tag := Constants::OBJECT_TAG
  save_acc_var(obj, Constants::OBJECT_TAG)
  generic_call(id, size, true, callee_, nargs, copy_lambda)
end
699
# Round a byte size up to the default frame alignment.
macro(:align_up) do |val|
  alignment = Constants::DEFAULT_FRAME_ALIGNMENT_IN_BYTES
  AndI(AddI(val).Imm("#{alignment} - 1U").word).Imm("~(#{alignment} - 1U)").word
end

# Aligned allocation size of a frame with 'size' vreg slots plus the
# fixed frame header and core extension data.
macro(:get_alloc_size) do |size|
  v := Mul(size, Constants::VREGISTER_SIZE).word
  align_up(AddI(v).Imm("CORE_EXT_FRAME_DATA_SIZE + #{Constants::FRAME_SIZE}").word).word
end
709
710macro(:create_frame) do |frame_size, callee|
711  actual_size := Add(frame_size, frame_size).word
712
713  if defines.DEBUG
714    If(callee, 0).EQ.Unlikely {
715      Intrinsic(:UNREACHABLE).Terminator.void
716    }
717  end
718  alloc_sz := get_alloc_size(actual_size)
719  mirror_sz := Mul(frame_size, Constants::VREGISTER_SIZE).word
720  mirror_offset = AddI(mirror_sz).Imm("CORE_EXT_FRAME_DATA_SIZE + #{Constants::FRAME_SIZE}").word
721  mem := call_runtime("AllocFrameInterp", %tr, alloc_sz).ptr
722  If(mem, 0).EQ.Unlikely {
723    move_to_exception
724  }
725  mirror_frame := Add(mem, mirror_offset).ptr
726  frame_end_addr := Add(mem, alloc_sz).ptr
727
728  If(mirror_frame, frame_end_addr).EQ.Unlikely do
729    Goto(:Exit_)
730  end
731  Label(:Head_)
732  mf := Phi(mirror_frame, mirror_frame_).ptr
733  StoreI(mf, 0x0).Imm(0).word
734  mirror_frame_ := AddI(mf).Imm(Constants::VREGISTER_SIZE).ptr
735  If(mf, frame_end_addr).LT.Likely do
736    Goto(:Head_)
737  end
738  Label(:Exit_)
739
740  call_runtime("InitializeFrame", mem, callee, %frame, frame_size).ptr
741end
742
# Common call sequence: safepoint, verify + hotness bookkeeping, then
# either bridge into compiled code (restoring acc/pc afterwards) or build
# a fresh interpreter frame, copy the arguments via copy_lambda and
# switch the interpreter state to the callee.
macro(:generic_call) do |id, size, is_initobj, callee, nargs, copy_lambda|
  safepoint(acc.type, !is_initobj)
  acc := acc_
  update_hotness_counter(callee, is_initobj)

  entrypoint := LoadI(callee).Imm(Constants::METHOD_COMPILED_ENTRY_POINT_OFFSET).ptr
  If(call_runtime("IsCompiled", entrypoint).i32, 0).NE.Unlikely {
    # Compiled path: spill acc, call through the i2c bridge and restore.
    save_acc_var(acc, acc_tag) unless is_initobj
    call_runtime("InterpreterToCompiledCodeBridge", pc, frame, callee, %tr).void

    StoreI(%tr, 0).Imm(Constants::GET_FRAME_KIND_OFFSET).u16
    StoreI(%tr, %frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr

    If(exception_val(), 0).NE.Unlikely {
      move_to_exception
    }
    acc_native := restore_acc().send(acc.type)
    acc_tag_native := restore_acc_tag().i64

    pc_native := advance_pc_imm(%pc, size)
  } Else {
    # Interpreted path: build the callee frame.
    num_vregs := read_uleb(method_file_data(callee))
    num_vregs := u32toword(num_vregs)
    # initobj passes the freshly created object in an extra vreg.
    num_vregs := AddI(num_vregs).Imm(1).word if is_initobj
    if nargs
      num_args := nargs
    else
      num_args := u32toword(LoadI(callee).Imm(Constants::METHOD_NUM_ARGS_OFFSET).u32)
    end

    frame_size := Add(num_vregs, num_args).word
    new_frame := create_frame(frame_size, callee)

    new_moffset := Mul(u32toword(frame_size), Constants::VREGISTER_SIZE).word
    method_ptr := callee
    # TODO(mbolshov): we could negate IS_STACKLESS and don't do this store every time
    frame_flags := "Frame::IS_STACKLESS"
    if is_initobj
      frame_flags := Or(frame_flags, "Frame::IS_INITOBJ").word
      obj_vreg_ptr := frame_vreg_ptr(new_frame, SubI(num_vregs).Imm(1).word)
      set_tag_frame(new_frame, obj_vreg_ptr, Constants::OBJECT_TAG, new_moffset)
      set_value(obj_vreg_ptr, restore_acc().send(acc.type))
    end
    StoreI(new_frame, frame_flags).Imm(Constants::FRAME_FLAGS_OFFSET).word
    copy_lambda.call(new_frame, num_vregs, num_args, new_moffset)
    StoreI(new_frame, frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
    StoreI(frame, advance_pc_imm(pc, size)).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr
    pc_int := call_runtime("GetInstructionsByMethod", callee).ptr
    StoreI(new_frame, pc_int).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
    StoreI(%tr, new_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
  }
  # Merge the two paths back into the live interpreter state.
  load_to_acc_reg(Phi(acc_native, acc).send(acc.type), Phi(acc_tag_native, acc_tag.i64).i64)
  frame := Phi(%frame, new_frame).ptr
  if Options.arm64?
    moffset := Phi(%moffset, new_moffset).word
    method_ptr := Phi(%method_ptr, method_ptr).ptr
  end
  pc := Phi(pc_native, pc_int).ptr
end
802
# Common return sequence: for stackless frames pop back to the previous
# frame (copy_lambda moves the return value), free the callee frame and
# resume at the saved pc; otherwise spill the acc and leave the
# interpreter.
macro(:generic_return) do |copy_lambda|
  frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word
  If(And(frame_flags, "Frame::IS_STACKLESS").word, 0).NE.Likely {
    prev_frame := LoadI(%frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
    next_pc := LoadI(prev_frame).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr
    copy_lambda.call(prev_frame, frame_flags)
    StoreI(%tr, prev_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
    call_runtime("FreeFrameInterp", frame, %tr).void
    frame := prev_frame
    if Options.arm64?
      moffset := get_moffset_frame(frame)
      method_ptr := get_method_ptr_frame(frame)
    end
    pc := next_pc
  } Else {
    save_acc()
    Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
  }
end
822
# Handlers:

# throw: raise the object in vs (NPE when it is null) and transfer
# control to the resolved catch block's state.
macro(:handle_throw) do |vs|
  IfImm(vs).Imm(0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
  } Else {
    call_runtime("ThrowExceptionFromInterpreter", %tr, vs, %frame, %pc).void
  }
  pc := find_catch_block()
  frame := frame_eh
  if Options.arm64?
    moffset := moffset_eh
    method_ptr := method_ptr_eh
  end
  load_to_acc_reg(acc_eh, acc_tag_eh)
end

# movi: vd <- 32-bit immediate.
macro(:handle_movi) do |vd, imm|
  set_primitive(vd, imm).i32
end

# movi.64: vd <- 64-bit immediate.
macro(:handle_movi_64) do |vd, imm|
  set_primitive(vd, imm).i64
end

# mov: vd <- vs.
macro(:handle_mov) do |vd, vs|
  set_primitive(vd, vs).u32
end

# lda: acc <- vs.
macro(:handle_lda) do |vs|
  set_acc_primitive(vs)
end
855
# lda.str: resolve a string by id and load it into acc.
macro(:handle_lda_str_id32) do |id|
  string := call_runtime("ResolveStringByIdEntrypoint", %tr, %frame, id).ptr
  If(string, 0).EQ.Unlikely {
    move_to_exception
  }
  set_acc_object(string)
end

# lda.type: resolve a class by id and load its managed mirror object
# into acc.
macro(:handle_lda_type_id16) do |id|
  type := type_ptr(id)
  If(type, 0).EQ.Unlikely {
    move_to_exception
  }
  type_obj := LoadI(type).Imm("panda::Class::GetManagedObjectOffset()").ptr
  set_acc_object(type_obj)
end

# lda.const: resolve a literal array by id into vreg v; the acc is saved
# and restored around the runtime call.
macro(:handle_lda_const_v8_id16) do |v, id|
  save_acc()
  cnst := call_runtime("ResolveLiteralArrayByIdEntrypoint", %tr, get_method_ptr(), u16tou32(id)).ref
  acc := restore_acc().send(acc.type)
  If(cnst, 0).EQ.Unlikely {
    move_to_exception
  }
  set_object(v, cnst).ref
end
882
# ldai / ldai.64 / fldai / fldai.64: load an immediate into acc.
macro(:handle_ldai_imm) do |imm|
  set_acc_primitive(imm)
end

macro(:handle_ldai_64_imm) do |imm|
  set_acc_primitive(imm)
end

macro(:handle_fldai_imm) do |imm|
  set_acc_primitive(imm)
end

macro(:handle_fldai_64_imm) do |imm|
  set_acc_primitive(imm)
end

# sta / sta.64: store acc into vd.
macro(:handle_sta_v8) do |vd|
  set_primitive(vd, acc.u32).u32
end

macro(:handle_sta_64_v8) do |vd|
  set_primitive(vd, acc.u64).u64
end
906
# jmp: relative branch with hotness/OSR instrumentation; yields the next
# pc and reloads the state instrument_branches may have changed.
macro(:handle_jmp_imm) do |pc, imm|
  next_pc := instrument_branches(imm, acc.type, get_method_ptr())
  load_to_acc_reg(acc_sf, acc_tag_sf)
  frame := frame_sf
  if Options.arm64?
    moffset := moffset_sf
    method_ptr := method_ptr_sf
  end
  next_pc
end

# inci: v += imm (i32).
macro(:handle_inci_v4_imm4) do |v, imm|
  val := get_value(v).i32
  add := Add(val, imm).i32
  set_value(v, add).i32
end
923
# cmp (signed, LT) / ucmp (unsigned, B): acc <- -1/0/1 from comparing
# acc to vs.
[['LT', ''], ['B', 'u']].each do |cc, sign|
  macro(:"handle_#{sign}cmp") do |acc_val, vs|
    # TODO: use Cmp IR instruction?
    If(acc_val, vs).send(:"#{cc.upcase}") {
      res1 := -1
    } Else {
      If(acc_val, vs).EQ {
        res2 := 0
      } Else {
        res3 := 1
      }
    }
    acc := Phi(res1, res2, res3).i32
  end
end
939
# Integer arithmetic/bitwise handlers, generated per opcode in every
# encoding variant (two vregs, vreg + acc, immediate; 32- and 64-bit).
['Add', 'Sub', 'And', 'Mul', 'Or', 'Xor', 'Shl', 'Shr', 'AShr'].each do |op|
  # v4_v4
  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
    set_acc_primitive(send(op, vs1, vs2).i32).i32
  end
  # v4_v4 without acc
  macro(:"handle_#{op.downcase}_v_v4_v4") do |v1, v2|
    set_primitive(v1, send(op, get_value(v1).i32, v2).i32).i32
  end
  # v8
  macro(:"handle_#{op.downcase}2_v8") do |vs|
    acc := send(op, acc.i32, vs).i32
  end
  # 64_v8
  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
    acc := send(op, acc.i64, vs).i64
  end
  # v8_v8
  macro(:"handle_#{op.downcase}2_v8_v8") do |vd, vs|
    set_primitive(vd, send(op, acc.i32, vs).i32).i32
  end
  # 64_v8_v8
  macro(:"handle_#{op.downcase}2_64_v8_v8") do |vd, vs|
    set_primitive(vd, send(op, acc.i64, vs).i64).i64
  end
  # imm
  macro(:"handle_#{op.downcase}i_imm") do |imm|
    acc := send(op, acc.i32, imm).i32
  end
  # v4_v4_imm
  macro(:"handle_#{op.downcase}i_v4_v4_imm") do |vd, vs, imm|
    set_primitive(vd, send(op, vs, imm).i32)
  end
end
974
# Floating-point add/sub/mul/div handlers (f32 and f64 variants).
['Add', 'Sub', 'Mul', 'Div'].each do |op|
  macro(:"handle_f#{op.downcase}2_v8") do |vs|
    acc := send(op, acc.f32, vs).f32
  end
  macro(:"handle_f#{op.downcase}2_64_v8") do |vs|
    acc := send(op, acc.f64, vs).f64
  end
  macro(:"handle_f#{op.downcase}2_v8_v8") do |vd, vs|
    set_primitive(vd, send(op, acc.f32, vs).f32).f32
  end
  macro(:"handle_f#{op.downcase}2_64_v8_v8") do |vd, vs|
    set_primitive(vd, send(op, acc.f64, vs).f64).f64
  end
end

# fmod: floating-point remainder via the C runtime (fmodf / fmod).
macro(:handle_fmod2_v8) do |vs|
  acc := call_runtime("fmodf", acc.f32, vs).f32
end

macro(:handle_fmod2_64_v8) do |vs|
  acc := call_runtime("fmod", acc.f64, vs).f64
end

macro(:handle_fmod2_v8_v8) do |vd, vs|
  set_primitive(vd, call_runtime("fmodf", acc.f32, vs).f32).f32
end

macro(:handle_fmod2_64_v8_v8) do |vd, vs|
  set_primitive(vd, call_runtime("fmod", acc.f64, vs).f64).f64
end
1005
# Integer division/modulo handlers. Every variant guards against a zero
# divisor first: on zero it raises ArithmeticException through the runtime
# and jumps to the exception dispatcher.
# Variant suffixes: plain = signed i32, '64' = signed i64, 'u' = unsigned,
# 'i' = immediate divisor, '_v*' = register-addressed operands/result.
['Div', 'Mod'].each do |op|
  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
    If(vs2, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_acc_primitive(send(op, vs1, vs2).i32)
  end
  # Three-address form writing into v1 (v1 = v1 <op> v2).
  macro(:"handle_#{op.downcase}_v_v4_v4") do |v1, v2|
    If(v2, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(v1, send(op, get_value(v1).i32, v2).i32)
  end
  macro(:"handle_#{op.downcase}2_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    acc := send(op, acc.i32, vs).i32
  end
  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    acc := send(op, acc.i64, vs).i64
  end
  macro(:"handle_#{op.downcase}2_v8_v8") do |vd, vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(vd, send(op, acc.i32, vs).i32).i32
  end
  macro(:"handle_#{op.downcase}2_64_v8_v8") do |vd, vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(vd, send(op, acc.i64, vs).i64).i64
  end
  # Unsigned variants (divu2/modu2).
  macro(:"handle_#{op.downcase}u2_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    acc := send(op, acc.u32, vs).u32
  end
  macro(:"handle_#{op.downcase}u2_64_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    acc := send(op, acc.u64, vs).u64
  end
  macro(:"handle_#{op.downcase}u2_v8_v8") do |vd, vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(vd, send(op, acc.u32, vs).u32).u32
  end
  macro(:"handle_#{op.downcase}u2_64_v8_v8") do |vd, vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(vd, send(op, acc.u64, vs).u64).u64
  end
  # Immediate-divisor variants; the zero check is on the immediate, which
  # the optimizer can presumably fold away for nonzero constants.
  macro(:"handle_#{op.downcase}i_imm") do |imm|
    If(imm, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    acc := send(op, acc.i32, imm).i32
  end
  macro(:"handle_#{op.downcase}i_v4_v4_imm") do |vd, vs, imm|
    If(imm, 0).EQ.Unlikely {
      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
      move_to_exception
    }
    set_primitive(vd, send(op, vs, imm).i32)
  end
end
1092
1093# Unary
# Integer unary operations on the accumulator: bitwise Not and arithmetic
# Neg, in 32-bit (default) and 64-bit ('_64') widths.
['Not', 'Neg'].each do |op|
  macro(:"handle_#{op.downcase}") do
    acc := send(op, acc.i32).i32
  end
  macro(:"handle_#{op.downcase}_64") do
    acc := send(op, acc.i64).i64
  end
end

# Floating-point negation of the accumulator (fneg = f32, fneg.64 = f64).
[['', :f32], ['_64', :f64]].each do |name, type|
  macro(:"handle_fneg#{name}") do
    acc := Neg(acc.send(type)).send(type)
  end
end
1108
# newarr vd, vs, id16: allocate an array of the class identified by `id`
# with length `vs`, storing the result object into vd.
macro(:handle_newarr_v4_v4_id16) do |vd, vs, id|
  # Negative length is rejected before calling the allocator.
  If(vs, 0).LT.Unlikely {
    call_runtime("ThrowNegativeArraySizeExceptionFromInterpreter", vs).void
    move_to_exception
  }
  # acc is saved/restored around the runtime call because allocation may GC.
  save_acc()
  array := call_runtime("CreateArrayByIdEntrypoint", %tr, get_method_ptr(), u16tou32(id), vs).ref
  acc := restore_acc().ptr
  # Null result means allocation failed with a pending exception (e.g. OOM).
  If(array, 0).EQ.Unlikely {
    move_to_exception
  }
  set_object(vd, array).ref
end

# lenarr vs: load the length of the array in vs into the accumulator,
# throwing NullPointerException for a null array reference.
macro(:handle_lenarr_v8) do |vs|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
  set_acc_primitive(len_array)
end
1131
# Array element loads. Common shape for all variants:
#   1. null check on the array reference,
#   2. unsigned bounds check (AE covers both index < 0 and index >= length),
#   3. element address = (index << elem_size_shift) + ARRAY_DATA_OFFSET.
# The index is taken from acc; the loaded element replaces acc.
[['ldarr', :i32, 2], ['ldarr_64', :i64, 3], ['fldarr_64', :f64, 3], ['fldarr_32', :f32, 2]].each do |name, type, elem_size_shift|
  macro(:"handle_#{name}_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
      move_to_exception
    }
    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
    If(acc.i32, len_array).AE.Unlikely {
      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
      move_to_exception
    }
    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
    acc := Load(vs, elem_offset).send(type)
  end
end

# Signed sub-word loads (ldarr.8 / ldarr.16): the element is sign-extended
# to i32 before being placed in the accumulator.
[[8, 0], [16, 1]].each do |size, elem_size_shift|
  macro(:"handle_ldarr_#{size}_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
      move_to_exception
    }
    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
    If(acc.i32, len_array).AE.Unlikely {
      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
      move_to_exception
    }
    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
    load_array := Load(vs, elem_offset).send(:"i#{size}")
    acc := send(:"i#{size}toi32", load_array)
  end
end

# Unsigned sub-word loads (ldarru.8 / ldarru.16): zero-extended to u32.
[[8, 0], [16, 1]].each do |size, elem_size_shift|
  macro(:"handle_ldarru_#{size}_v8") do |vs|
    If(vs, 0).EQ.Unlikely {
      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
      move_to_exception
    }
    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
    If(acc.i32, len_array).AE.Unlikely {
      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
      move_to_exception
    }
    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
    load_array := Load(vs, elem_offset).send(:"u#{size}")
    acc := send(:"u#{size}tou32", load_array)
  end
end

# ldarr.obj: reference-element load; element size comes from
# REFERENCE_TYPE_SHIFT and the result is stored with the object tag.
macro(:handle_ldarr_obj_v8) do |vs|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
  If(acc.i32, len_array).AE.Unlikely {
    call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
    move_to_exception
  }
  elem_offset = AddI(ShlI(acc.i32).Imm(Constants::REFERENCE_TYPE_SHIFT).i32).Imm(Constants::ARRAY_DATA_OFFSET).u32
  load_array := Load(vs, elem_offset).ref
  set_acc_object(load_array)
end
1196
1197[[8, 0], [16, 1]].each do |size, elem_size_shift|
1198  macro(:"handle_starr_#{size}_v4_v4") do |vs1, vs2|
1199    If(vs1, 0).EQ.Unlikely {
1200      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1201      move_to_exception
1202    }
1203    len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1204    If(vs2, len_array).AE.Unlikely {
1205      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1206      move_to_exception
1207    }
1208    elem_offset = AddI(ShlI(vs2).Imm(elem_size_shift).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1209    Store(vs1, elem_offset, acc.i32).send(:"i#{size}")
1210  end
1211end
1212
1213[['starr', :i32, 2], ['starr_64', :i64, 3], ['fstarr_32', :f32, 2], ['fstarr_64', :f64, 3]].each do |name, type, elem_size_shift|
1214  macro(:"handle_#{name}_v4_v4") do |vs1, vs2|
1215    If(vs1, 0).EQ.Unlikely {
1216      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1217      move_to_exception
1218    }
1219    len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1220    If(vs2, len_array).AE.Unlikely {
1221      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1222      move_to_exception
1223    }
1224    elem_offset = AddI(ShlI(vs2).Imm(elem_size_shift).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1225    Store(vs1, elem_offset, acc.send(type)).send(type)
1226  end
1227end
1228
1229macro(:handle_starr_obj_v4_v4) do |vs1, vs2|
1230  If(vs1, 0).EQ.Unlikely {
1231    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1232    move_to_exception
1233  }
1234  len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1235  If(vs2, len_array).AE.Unlikely {
1236    call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1237    move_to_exception
1238  }
1239  res := call_runtime("CheckStoreArrayReferenceFromInterpreter", vs1, acc.ref).u8
1240  If(res, 0).NE.Unlikely {
1241    move_to_exception
1242  }
1243  elem_offset = AddI(ShlI(vs2).Imm(Constants::REFERENCE_TYPE_SHIFT).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1244  Store(vs1, elem_offset, acc.ref).SetNeedBarrier(true).ref
1245end
1246
# newobj vd, id16: resolve the class by id and allocate a new instance of
# it into vd. Either step can leave a pending exception (null result).
macro(:handle_newobj_v8_id16) do |vd, id|
  # acc is saved because both class resolution and allocation may GC.
  save_acc()
  type := type_ptr(id, false, false)
  If(type, 0).EQ.Unlikely {
    move_to_exception
  }
  object := call_runtime("CreateObjectByClassInterpreter", %tr, type).ref
  acc := restore_acc().ptr
  If(object, 0).EQ.Unlikely {
    move_to_exception
  }
  set_object(vd, object).ref
end

# Debug-only sanity check: the fast-path field handlers below do not emit
# memory fences, so they must never be reached for volatile fields.
# Hitting a volatile field here indicates a dispatch bug -> UNREACHABLE.
macro(:assert_non_volatile) do |field|
  if defines.DEBUG
    field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
    is_volatile := AndI(field_access_flags).Imm("ACC_VOLATILE").u32
    If(is_volatile, 0).NE.Unlikely {
        Intrinsic(:UNREACHABLE).Terminator.void
    }
  end
end
1270
# stobj vs, id16: store the 32-bit (or narrower) primitive accumulator into
# an instance field of the object in vs. Extracts the field's type id from
# its access flags to pick the correct store width; type ids below 0x7
# correspond to sub-32-bit primitives, presumably u1/i8/u8/i16/u16 in that
# order (see the typeid table) — wider primitives take the plain u32 store.
macro(:handle_stobj_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  # Null field means resolution failed with a pending exception.
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        # acc holds the value widened to 32 bits ("u32"/"i32"); the store
        # truncates it to the field's width.
        acc_type = field_type[0] + "32"
        Store(vs, offset, acc.send(:"#{acc_type}")).send(:"#{field_type}")
      }
    end
  } Else {
    Store(vs, offset, acc.u32).u32
  }
end

# stobj.64 vs, id16: 64-bit primitive store — no width dispatch needed.
macro(:handle_stobj_64_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  Store(vs, offset, acc.u64).u64
end

# stobj.obj vs, id16: reference store with GC write barrier.
macro(:handle_stobj_obj_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  Store(vs, offset, acc.ref).SetNeedBarrier(true).ref
end
1330
# stobj.v v1, v2, id16: register-sourced variants of the stobj handlers —
# the stored value comes from register v1 instead of the accumulator, and
# the target object is in v2. Width dispatch mirrors handle_stobj_v8_id16.
macro(:handle_stobj_v_v4_v4_id16) do |v1, v2, id|
  If(v2, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        reg_type = field_type[0] + "32"
        Store(v2, offset, v1.send(:"#{reg_type}")).send(:"#{field_type}")
      }
    end
  } Else {
    Store(v2, offset, v1.u32).u32
  }
end

# stobj.v.64: 64-bit register-sourced store.
macro(:handle_stobj_v_64_v4_v4_id16) do |v1, v2, id|
  If(v2, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  Store(v2, offset, v1.u64).u64
end

# stobj.v.obj: register-sourced reference store with write barrier.
macro(:handle_stobj_v_obj_v4_v4_id16) do |v1, v2, id|
  If(v2, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  Store(v2.ref, offset, v1.ref).SetNeedBarrier(true).ref
end
1390
# ldobj vs, id16: load a 32-bit (or narrower) primitive instance field of
# the object in vs into the accumulator. Sub-32-bit values are widened to
# 32 bits; the final acc value is zero-extended to u64 and tagged primitive.
macro(:handle_ldobj_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  # no restore as acc is going to be redefined
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        store_type = field_type[0] + "32"
        value := Load(vs, offset).send(:"#{field_type}")
        acc_value := send(:"#{field_type}to#{store_type}", value)
      }
      # Merge the branch result into acc after each typeid test; the Phi
      # order is significant for the generated control flow.
      acc := Phi(acc.u64, acc_value.u64).u64
    end
    acc_casted_slow := acc
  } Else {
    acc_casted_fast := u32tou64(Load(vs, offset).u32)
  }

  # Join the sub-32-bit (slow) and 32-bit (fast) paths.
  acc := Phi(acc_casted_slow.u64, acc_casted_fast.u64).u64
  acc_tag := Constants::PRIMITIVE_TAG
end

# ldobj.64 vs, id16: 64-bit primitive field load into acc.
macro(:handle_ldobj_64_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  # no restore as acc is going to be redefined
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  acc := Load(vs, offset).u64
  acc_tag := Constants::PRIMITIVE_TAG
end

# ldobj.obj vs, id16: reference field load; acc gets the object tag.
macro(:handle_ldobj_obj_v8_id16) do |vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  # no restore as acc is going to be redefined
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  value := Load(vs, offset).ref
  set_acc_object(value).ref
end
1461
# ldobj.v vd, vs, id16: register-destination variants of the ldobj
# handlers — the loaded field value goes into register vd instead of acc.
macro(:handle_ldobj_v_v4_v4_id16) do |vd, vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        # Widen the sub-32-bit field value to 32 bits before storing to vd.
        store_type = field_type[0] + "32"
        value := Load(vs, offset).send(:"#{field_type}")
        set_primitive(vd, send(:"#{field_type}to#{store_type}", value)).send(:"#{store_type}")
      }
    end
  } Else {
    set_primitive(vd, Load(vs, offset).u32).u32
  }
end

# ldobj.v.64: 64-bit primitive field load into register vd.
macro(:handle_ldobj_v_64_v4_v4_id16) do |vd, vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  value := Load(vs, offset).u64
  set_primitive(vd, value).u64
end

# ldobj.v.obj: reference field load into register vd.
macro(:handle_ldobj_v_obj_v4_v4_id16) do |vd, vs, id|
  If(vs, 0).EQ.Unlikely {
    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
    move_to_exception
  }
  field := field_offset(id)
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  value := Load(vs, offset).ref
  set_object(vd, value).ref
end
1524
# ststatic id16: store the 32-bit (or narrower) primitive accumulator into
# a static field. The store target is the field's declaring class object
# plus the field offset. Width dispatch mirrors handle_stobj_v8_id16.
macro(:handle_ststatic_id16) do |id|
  update_bytecode_offset

  field := static_field(id, false)
  # no restore because acc holds primitive value

  # Null field means resolution/initialization failed with a pending
  # exception.
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        acc_type = field_type[0] + "32"
        Store(field_class, offset, acc.send(:"#{acc_type}")).send(:"#{field_type}")
      }
    end
  } Else {
    Store(field_class, offset, acc.u32).u32
  }
end
1553
1554macro(:handle_ststatic_64_id16) do |id|
1555  update_bytecode_offset
1556
1557  field := static_field(id, false)
1558  # no restore because acc holds primitive value
1559
1560  If(field, 0).EQ.Unlikely {
1561    move_to_exception
1562  }
1563
1564  assert_non_volatile(field)
1565
1566  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1567  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1568  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1569  Store(field_class, offset, acc.u64).u64
1570end
1571
# ststatic.obj id16: store the reference accumulator into a static field.
# The write barrier requires the store base to be the class's managed
# object, so the field offset is rebased from the Class structure onto the
# managed object before the barriered store.
macro(:handle_ststatic_obj_id16) do |id|
  update_bytecode_offset
  field := static_field(id)
  # static_field may have spilled/restored acc around a runtime call;
  # merge the original and restored values.
  acc := Phi(acc, acc_restored).ref
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref_uint
  class_managed_object := LoadI(bitcast_to_ref(field_class)).Imm(Constants::BASE_CLASS_MANAGED_OBJECT_OFFSET).ref_uint
  # Adjust the offset by the delta between the Class and its managed object.
  offset_managed_object := Add(offset, Sub(field_class, class_managed_object).ref_uint).u32

  Store(bitcast_to_ref(class_managed_object).ref, offset_managed_object, acc.ref).SetNeedBarrier(true).ref
end
1589
# ldstatic id16: load a 32-bit (or narrower) primitive static field into
# the accumulator. Sub-32-bit values are widened to 32 bits; the result is
# zero-extended to u64 and tagged primitive. Mirrors handle_ldobj_v8_id16
# with the declaring class as the load base.
macro(:handle_ldstatic_id16) do |id|
  update_bytecode_offset
  save_acc()
  field := static_field(id, false)
  # no restore as acc is going to be redefined
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref

  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
      If(field_type_id, typeid).EQ {
        store_type = field_type[0] + "32"
        value := Load(field_class, offset).send(:"#{field_type}")
        acc_value := send(:"#{field_type}to#{store_type}", value)
      }
      # Merge each typeid branch's result into acc; Phi order matters.
      acc := Phi(acc.u64, acc_value.u64).u64
    end
    acc_casted_slow := acc
  } Else {
    acc_casted_fast := u32tou64(Load(field_class, offset).u32)
  }

  # Join the sub-32-bit (slow) and 32-bit (fast) paths.
  acc := Phi(acc_casted_slow.u64, acc_casted_fast.u64).u64
  acc_tag := Constants::PRIMITIVE_TAG
end
1623
1624macro(:handle_ldstatic_64_id16) do |id|
1625  update_bytecode_offset
1626  save_acc()
1627  field := static_field(id, false)
1628  # no restore as acc is going to be redefined
1629  If(field, 0).EQ.Unlikely {
1630    move_to_exception
1631  }
1632
1633  assert_non_volatile(field)
1634
1635  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1636  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1637  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1638  acc := Load(field_class, offset).u64
1639  acc_tag := Constants::PRIMITIVE_TAG
1640end
1641
# ldstatic.obj id16: load a reference static field into the accumulator.
# As in ststatic.obj, the access is rebased from the Class structure onto
# its managed object before the load.
macro(:handle_ldstatic_obj_id16) do |id|
  update_bytecode_offset
  save_acc()
  field := static_field(id, false)
  # no restore as acc is going to be redefined
  If(field, 0).EQ.Unlikely {
    move_to_exception
  }

  assert_non_volatile(field)

  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref_uint
  class_managed_object := LoadI(bitcast_to_ref(field_class)).Imm(Constants::BASE_CLASS_MANAGED_OBJECT_OFFSET).ref_uint
  # Adjust the offset by the delta between the Class and its managed object.
  offset_managed_object := Add(offset, Sub(field_class, class_managed_object).ref_uint).u32

  value := Load(bitcast_to_ref(class_managed_object), offset_managed_object).ref
  set_acc_object(value).ref
end
1661
# isinstance id16: acc = (acc.ref instanceof class(id)) as a 0/1 primitive.
macro(:handle_isinstance_id16) do |id|
  type := type_ptr(id, true)
  # type_ptr may have spilled/restored acc around a runtime call.
  acc := Phi(acc, acc_restored).ref
  If(type, 0).EQ.Unlikely {
    move_to_exception
  }
  set_acc_primitive(call_runtime("IsInstanceByIdEntrypoint", acc.ref, type).u32)
end

# checkcast id16: throw (via the runtime) if acc.ref is not castable to
# class(id); acc is left unchanged on success.
macro(:handle_checkcast_id16) do |id|
  type := type_ptr(id, true)
  acc := Phi(acc, acc_restored).ref
  If(type, 0).EQ.Unlikely {
    move_to_exception
  }
  If(call_runtime("CheckCastByIdEntrypoint", acc.ref, type).u32, 0).NE.Unlikely {
    move_to_exception
  }
end

# sta.obj vd: store the reference accumulator into register vd.
macro(:handle_sta_obj_v8) do |vd|
  set_object(vd, acc.ref).ref
end

# lda.obj vs: load the reference in register vs into the accumulator.
macro(:handle_lda_obj_v8) do |vs|
  set_acc_object(vs)
end

# mov.null vd: write the null reference into register vd.
macro(:handle_mov_null_v8) do |vd|
  set_object(vd, 0).ref
end

# lda.null: write the null reference into the accumulator.
macro(:handle_lda_null) do
  set_acc_object(0)
end
1697
# Conditional jumps comparing acc.i32 against a register (jeq/jne/jlt/...)
# with 8- or 16-bit signed branch offsets. Taken branches go through
# profiling (update_branch_taken) and instrument_branches, which computes
# the target pc; untaken branches just advance past the instruction. The
# trailing Phis merge acc/frame (and on arm64 moffset/method_ptr) state
# from the two paths back into the dispatch registers, and the final Phi
# yields the next pc.
['eq', 'ne', 'lt', 'gt', 'le', 'ge'].each do |cc|
  ['8', '16'].each do |from|
    macro(:"handle_j#{cc}_v8_imm#{from}") do |pc, vs, imm, size|
      method_ptr := get_method_ptr()
      If(acc.i32, vs).send(:"#{cc.upcase}") {
        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
        update_branch_taken(method_ptr)
        pc1 := instrument_branches(imm_casted, :"i32", method_ptr)
      } Else {
        update_branch_untaken(method_ptr)
        pc2 := advance_pc_imm(pc, size)
      }
      load_to_acc_reg(Phi(acc_sf, acc).i32, Phi(acc_tag_sf, acc_tag).i64)
      frame := Phi(frame_sf, %frame).ptr
      if Options.arm64?
        moffset := Phi(moffset_sf, %moffset).word
        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
      end
      Phi(pc1, pc2).ptr
    end
  end
end

# Zero-compare conditional jumps (jeqz/jnez/...): same structure, but acc
# is compared against the constant 0 instead of a register.
['ne', 'eq', 'lt', 'gt', 'le', 'ge'].each do |cc|
  ['8', '16'].each do |from|
    macro(:"handle_j#{cc}z_imm#{from}") do |pc, imm, size|
      method_ptr := get_method_ptr()
      If(acc.i32, 0).send(:"#{cc.upcase}") {
        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
        update_branch_taken(method_ptr)
        pc1 := instrument_branches(imm_casted, :"i32", method_ptr)
      } Else {
        update_branch_untaken(method_ptr)
        pc2 := advance_pc_imm(pc, size)
      }
      load_to_acc_reg(Phi(acc_sf, acc).i32, Phi(acc_tag_sf, acc_tag).i64)
      frame := Phi(frame_sf, %frame).ptr
      if Options.arm64?
        moffset := Phi(moffset_sf, %moffset).word
        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
      end
      Phi(pc1, pc2).ptr
    end
  end
end
1743
# Floating-point three-way compares into acc.i32.
# fcmpg: Fcmpg(true) makes NaN compare as "greater" (+1);
# fcmpl: default Cmp semantics, so NaN presumably yields -1 ("less") —
# the two variants exist to pick the NaN ordering the bytecode requires.
macro(:"handle_fcmpg_v8") do |vs|
  acc := Cmp(acc.f32, vs).SrcType("DataType::FLOAT32").Fcmpg(true).i32
end

macro(:"handle_fcmpg_64_v8") do |vs|
  acc := Cmp(acc.f64, vs).SrcType("DataType::FLOAT64").Fcmpg(true).i32
end

macro(:"handle_fcmpl_v8") do |vs|
  acc := Cmp(acc.f32, vs).i32
end

macro(:"handle_fcmpl_64_v8") do |vs|
  acc := Cmp(acc.f64, vs).i32
end
1759
# Reference-equality conditional jumps (jeq.obj / jne.obj) comparing a
# register against acc.ref. Same profiling + Phi-merge structure as the
# integer jumps above, but branch instrumentation uses the :ref kind.
['ne', 'eq'].each do |cc|
  ['8', '16'].each do |from|
    macro(:"handle_j#{cc}_obj_v8_imm#{from}") do |pc, vs, imm, size|
      method_ptr := get_method_ptr()
      If(vs, acc.ref).send(:"#{cc.upcase}") {
        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
        update_branch_taken(method_ptr)
        pc1 := instrument_branches(imm_casted, :"ref", method_ptr)
      } Else {
        update_branch_untaken(method_ptr)
        pc2 := advance_pc_imm(pc, size)
      }
      load_to_acc_reg(Phi(acc_sf, acc).ref, Phi(acc_tag_sf, acc_tag).i64)
      frame := Phi(frame_sf, %frame).ptr
      if Options.arm64?
        moffset := Phi(moffset_sf, %moffset).word
        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
      end
      Phi(pc1, pc2).ptr
    end
  end
end

# Null-compare reference jumps (jeqz.obj / jnez.obj): acc.ref vs null.
['ne', 'eq'].each do |cc|
  ['8', '16'].each do |from|
    macro(:"handle_j#{cc}z_obj_imm#{from}") do |pc, imm, size|
      method_ptr := get_method_ptr()
      If(acc.ref, 0).send(:"#{cc.upcase}") {
        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
        update_branch_taken(method_ptr)
        pc1 := instrument_branches(imm_casted, :"ref", method_ptr)
      } Else {
        update_branch_untaken(method_ptr)
        pc2 := advance_pc_imm(pc, size)
      }
      load_to_acc_reg(Phi(acc_sf, acc).ref, Phi(acc_tag_sf, acc_tag).i64)
      frame := Phi(frame_sf, %frame).ptr
      if Options.arm64?
        moffset := Phi(moffset_sf, %moffset).word
        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
      end
      Phi(pc1, pc2).ptr
    end
  end
end
1805
# Conversions from integer types to u1 (boolean normalization of acc).

['i32', 'i64', 'u32', 'u64'].each do |from|
  macro(:"handle_#{from}tou1") do
    acc := send(:"#{from}tou1", acc.send(from))
  end
end

# Integer truncations and extensions

# 32-bit -> i64 extensions (sign- or zero-extended per the source type).
['i32', 'u32'].each do |from|
  macro(:"handle_#{from}toi64") do
    acc := send(:"#{from}toi64", acc.send(from))
  end
end

# 32-bit -> sub-word truncations: truncate, then widen back to the
# matching 32-bit type so acc always holds a 32-bit value.
['i32', 'u32'].each do |from|
  ['i16', 'u16', 'i8', 'u8'].each do |to|
    macro(:"handle_#{from}to#{to}") do
      value := send(:"#{from}to#{to}", acc.send(from))
      to_expanded = to.gsub(/\d+/,"32")
      acc := send(:"#{to}to#{to_expanded}", value)
    end
  end
end

# i64 -> i32 truncation.
macro(:handle_i64toi32) do
  acc := i64toi32(acc.i64)
end

# u64 -> 32-bit truncations.
['i32', 'u32'].each do |to|
  macro(:"handle_u64to#{to}") do
    acc := send(:"u64to#{to}", acc.u64)
  end
end
1841
# Conversions between integer and floating point types

# Integer -> float conversions of the accumulator.
['i32', 'u32', 'i64', 'u64'].each do |from|
  ['f32', 'f64'].each do |to|
    macro(:"handle_#{from}to#{to}") do
      acc := send(:"#{from}to#{to}", acc.send(from))
    end
  end
end
1851
1852['f64', 'i32', 'i64', 'u32', 'u64'].each do |to|
1853  macro(:"handle_f32to#{to}") do
1854    acc := send(:"f32to#{to}", acc.f32)
1855  end
1856end
1857
1858['i32', 'i64', 'u32', 'u64', 'f32'].each do |to|
1859  macro(:"handle_f64to#{to}") do
1860    acc := send("f64to#{to}", acc.f64)
1861  end
1862end
1863
# mov.64 vd, vs: copy a 64-bit primitive between registers.
macro(:handle_mov_64) do |vd, vs|
  set_primitive(vd, vs).u64
end

# mov.obj vd, vs: copy a reference between registers.
macro(:handle_mov_obj) do |vd, vs|
  set_object(vd, vs).ref
end

# lda.64 vs: load a 64-bit primitive register into the accumulator.
macro(:handle_lda_64) do |vs|
  set_acc_primitive(vs)
end

# sta.64 vd: store the 64-bit primitive accumulator into register vd.
macro(:handle_sta_64_v8) do |vd|
  set_primitive(vd, acc.u64).u64
end

# i32tof64: convert acc from i32 to f64.
macro(:handle_i32tof64) do
  acc := i32tof64(acc.i32)
end

# fmovi vd, imm: write an f32 immediate into register vd.
macro(:handle_fmovi_v8_imm) do |vd, imm|
  set_primitive(vd, imm).f32
end

# fmovi.64 vd, imm: write an f64 immediate into register vd.
macro(:handle_fmovi_64_v8_imm) do |vd, imm|
  set_primitive(vd, imm).f64
end
1891
# Resolves the method to invoke for call/call.virt/initobj handlers.
#   id         - method id from the bytecode
#   is_virt    - true for virtual calls (adds vtable resolution)
#   is_initobj - true for initobj (constructor path; skips the null check
#                since the receiver was just allocated)
#   v, imm     - used by get_receiver to locate the receiver register for
#                non-static callees
# Returns the resolved callee method pointer.
macro(:get_callee) do |id, is_virt, is_initobj, v, imm = nil|
  update_bytecode_offset
  if is_initobj
    callee := cache_entry(id, false, false, :ptr, nil)
  else
    callee := callee_ptr(id, true)
    # callee_ptr may spill/restore acc around a runtime call.
    acc := Phi(acc, acc_restored).send(acc.type)
    If(callee, 0).EQ.Unlikely {
      move_to_exception
    }
  end
  if !is_initobj
    # For instance methods: null-check the receiver and, for virtual
    # calls, resolve the actual target through the runtime.
    method_flags := LoadI(callee).Imm(Constants::METHOD_ACCESS_FLAGS_OFFSET).Volatile(true).u32
    If(AndI(method_flags).Imm("panda::ACC_STATIC").u32, 0).EQ.Unlikely {
      receiver = get_receiver(v, imm)
      receiver_word := Bitcast(receiver).SrcType("DataType::POINTER").word
      receiver_ref = Cast(receiver_word).SrcType(Options.arch_64_bits? ? "DataType::UINT64" : "DataType::UINT32").ref_uint
      If(receiver_ref, 0).EQ.Unlikely {
        call_runtime("ThrowNullPointerExceptionFromInterpreter").void
        move_to_exception
      }
      if is_virt
        callee_virt := call_runtime("ResolveVirtualMethod", callee, %frame, receiver_ref, %pc, method_ptr).ptr
      else
        callee_virt := callee
      end
      }
    # Merge the static (callee) and instance (callee_virt) paths.
    Phi(callee, callee_virt).ptr
  else
    callee
  end
end
1924
# initobj.short / call.short / call.virt.short: two-argument calls where
# v1 and v2 are copied into the callee frame's first argument vregs.
['initobj', 'call', 'call_virt'].each do |op|
  macro(:"handle_#{op}_short_v4_v4_id16") do |v1, v2, id, size|
    is_initobj = (op == 'initobj')
    callee := get_callee(id, op.include?('virt'), is_initobj, v1)
    # Argument copier invoked by the generic call machinery once the new
    # frame is set up: args start at vreg index num_vregs.
    copy_lambda := lambda { |new_frame, num_vregs, _, new_moffset|
      copy_reg(new_frame, num_vregs, v1, new_moffset)
      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
    }
    if is_initobj
      initobj_call(id, size, callee, 2, copy_lambda, 0, v1)
    else
      generic_call(id, size, is_initobj, callee, 2, copy_lambda)
    end
  end
end
1940
# call.acc.short / call.virt.acc.short: two-argument calls where one
# argument is the accumulator; `imm` (0 or 1) selects which argument slot
# acc occupies, the register v fills the other.
['call', 'call_virt'].each do |op|
  macro(:"handle_#{op}_acc_short_v4_imm4_id16") do |v, imm, id, size|
    callee := get_callee(id, op.include?('virt'), false, v, imm)
    generic_call(id, size, false, callee, 2, lambda do |new_frame, num_vregs, _, new_moffset|
      If(imm, 0).EQ {
        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, num_vregs), new_moffset)
        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v, new_moffset)
      } Else {
        copy_reg(new_frame, num_vregs, v, new_moffset)
        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), new_moffset)
      }
    end)
  end
end
1955
# Four-argument call handlers where one argument is the accumulator:
# handle_{call,call_virt}_acc_v4_v4_v4_imm4_id16. `imm` (0..3) is the argument
# position the accumulator occupies; the remaining positions are filled from
# v1, v2, v3 in order.
['call', 'call_virt'].each do |op|
  macro(:"handle_#{op}_acc_v4_v4_v4_imm4_id16") do |v1, v2, v3, imm, id, size|
    callee := get_callee(id, op.include?('virt'), false, v1, imm)
    generic_call(id, size, false, callee, 4, lambda do |new_frame, num_vregs, _, new_moffset|
      # Nested If-chain dispatches on imm: the acc goes into slot `imm`,
      # the vregs fill the remaining slots in order.
      If(imm, 0).EQ {
        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, num_vregs), new_moffset)
        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v1, new_moffset)
        copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2, new_moffset)
        copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
      } Else {
        If(imm, 1).EQ {
          copy_reg(new_frame, num_vregs, v1, new_moffset)
          copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), new_moffset)
          copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2, new_moffset)
          copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
        } Else {
          If(imm, 2).EQ {
            copy_reg(new_frame, num_vregs, v1, new_moffset)
            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
            copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(2).word), new_moffset)
            copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
          } Else {
            # Fallthrough branch assumes imm == 3 (no runtime assert yet).
            # TODO(mbolshov): assert imm==3
            copy_reg(new_frame, num_vregs, v1, new_moffset)
            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
            copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3, new_moffset)
            copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(3).word), new_moffset)
          }
        }
      }
    end)
  end
end
1989
# Four-argument call handlers: handle_{initobj,call,call_virt}_v4_v4_v4_v4_id16.
# Arguments come from four explicit vregs v1..v4.
['initobj', 'call', 'call_virt'].each do |op|
  macro(:"handle_#{op}_v4_v4_v4_v4_id16") do |v1, v2, v3, v4, id, size|
    is_initobj = (op == 'initobj')
    # For call.virt, v1 is the receiver used to resolve the virtual target.
    callee := get_callee(id, op.include?('virt'), is_initobj, v1)
    # Copies v1..v4 into the new frame's first four argument vregs.
    copy_lambda := lambda { |new_frame, num_vregs, _, new_moffset|
      copy_reg(new_frame, num_vregs, v1, new_moffset)
      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
      copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3, new_moffset)
      copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v4, new_moffset)
    }
    if is_initobj
      # Trailing 1 here vs 0 in the _short variant — presumably a format
      # discriminator consumed by initobj_call (defined elsewhere); TODO confirm.
      initobj_call(id, size, callee, 4, copy_lambda, 1, v1)
    else
      generic_call(id, size, false, callee, 4, copy_lambda)
    end
  end
end
2007
# Range call handlers: handle_{initobj,call,call_virt}_range_v8_id16.
# Arguments are a contiguous run of `num_args` vregs starting at v; they are
# copied one by one (value + tag) into the new frame's argument area.
['initobj', 'call', 'call_virt'].each do |op|
  macro(:"handle_#{op}_range_v8_id16") do |v, id, size|
    is_initobj = (op == 'initobj')
    callee := get_callee(id, op.include?('virt'), is_initobj, v)
    copy_lambda := lambda { |new_frame, num_vregs, num_args, new_moffset|
      # Base pointers: destination = first argument vreg of the new frame,
      # source = vreg v of the current frame.
      dst_ptr_0 := frame_vreg_ptr(new_frame, num_vregs)
      src_ptr_0 := vreg_ptr(v)
      # Hand-rolled counted loop: i is an SSA Phi over the initial value i0
      # and the incremented value i1 from the back-edge.
      i0 := 0
      Label(:Head)  # TODO(mbolshov): use While loops when they are ready
      i := Phi(i0, i1).word
      If(i, num_args).EQ.Unlikely do
        Goto(:Exit)
      end
      # Both frames use the same vreg stride, so one offset serves src and dst.
      offset := Mul(i, Constants::VREGISTER_SIZE).word
      dst_ptr := Add(dst_ptr_0, offset).ptr
      src_ptr := Add(src_ptr_0, offset).ptr
      # Copy the 64-bit payload and the tag for each vreg.
      set_value(dst_ptr, get_value(src_ptr).i64)
      set_tag_frame(new_frame, dst_ptr, get_tag(src_ptr), new_moffset)
      i1 := Add(i, 1).word
      Goto(:Head)
      Label(:Exit)
    }
    if is_initobj
      # nil arg count: the number of arguments is determined at runtime
      # (num_args inside copy_lambda). Trailing 2 is presumably the range
      # format discriminator for initobj_call — TODO confirm.
      initobj_call(id, size, callee, nil, copy_lambda, 2, v)
    else
      generic_call(id, size, false, callee, nil, copy_lambda)
    end
  end
end
2037
# handle_return / handle_return_64 / handle_return_obj share one body:
# generic_return pops the frame, and the lambda copies the callee's
# accumulator into the caller frame's accumulator slot.
[:handle_return, :handle_return_64, :handle_return_obj].each do |handler|
  macro(handler) do
    generic_return(lambda { |prev_frame, _| copy_acc(acc_ptr_frame(prev_frame)) })
  end
end
2043
# Return path used when control leaves an interpreter frame that may have been
# entered through a stackless transition (e.g. after deoptimization or an
# initobj continuation). Either returns to the native caller or unwinds to the
# previous ("fake") interpreter frame.
macro(:handle_fake_return) do
    frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word

    # Not a stackless frame: leave the interpreter entirely via the
    # INTERPRETER_RETURN intrinsic (terminator — no fallthrough).
    If(And(frame_flags, "Frame::IS_STACKLESS").word, 0).EQ.Unlikely {
      Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
    }

    # Unwind target: the previous frame and the instruction to resume at.
    fake_frame := LoadI(%frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
    if Options.arm64?
      # arm64 keeps moffset/method_ptr in fixed registers; reload them for
      # the frame we are returning into.
      fake_moffset := get_moffset_frame(fake_frame)
      fake_method_ptr := get_method_ptr_frame(fake_frame)
    end
    fake_pc := LoadI(fake_frame).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr

    # Accumulator restore: for an initobj continuation the result is the
    # constructed object stored in the previous frame's acc slot (tagged as
    # an object); otherwise restore the previously saved acc/tag pair.
    If(And(frame_flags, "Frame::IS_INITOBJ").word, 0).NE.Unlikely {
      fake_acc_initobj := LoadI(acc_ptr_frame(fake_frame)).Imm(0).send(acc.type)
      fake_acc_tag_initobj := Constants::OBJECT_TAG
    } Else {
      fake_acc_general := restore_acc().send(acc.type)
      fake_acc_tag_general := restore_acc_tag()
    }
    fake_acc := Phi(fake_acc_initobj, fake_acc_general).send(acc.type)
    fake_acc_tag := Phi(fake_acc_tag_initobj, fake_acc_tag_general).i64
    # Make the previous frame the thread's current frame, then free ours.
    StoreI(%tr, fake_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
    # NOTE(review): plain `frame` here vs `%frame` elsewhere in this macro —
    # confirm it resolves to the current frame being freed.
    call_runtime("FreeFrameInterp", frame, %tr).void

    # Pending exception: recompute pc from the unwound frame's bytecode
    # offset and divert to the exception-handling path.
    If(exception_val(), 0).NE.Unlikely {
      frame := fake_frame
      fake_frame_insts := LoadI(fake_frame).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
      fake_frame_bc_offset := LoadI(fake_frame).Imm(Constants::FRAME_BYTECODE_OFFSET).u64
      pc := Add(fake_frame_insts, fake_frame_bc_offset).ptr
      move_to_exception
    }
end
2078
# return.void handler. For a normal frame the current acc/acc_tag are kept;
# when returning from an initobj-flagged frame, the accumulator is overwritten
# with the constructed object from the previous frame's acc slot (object tag).
macro(:handle_return_void) do
  generic_return(lambda { |prev_frame, cur_frame_flags|
    If(And(cur_frame_flags, "Frame::IS_INITOBJ").word, 0).NE.Unlikely do
      acc_obj := LoadI(acc_ptr_frame(prev_frame)).Imm(0).send(acc.type)
      acc_tag_obj := Constants::OBJECT_TAG
    end
    # Phi merges the initobj and the plain-return values for acc and its tag.
    load_to_acc_reg(Phi(acc, acc_obj).send(acc.type), Phi(acc_tag.i64, acc_tag_obj).i64)
  })
end
2088
2089include_plugin 'interpreter_handlers'
2090
2091# Functions:
2092
# Interpreter entry point. Moves the C++ calling-convention arguments into the
# fixed registers of the internal interpreter calling convention (see
# fixed_regmap at the top of the file) and dispatches to the handler of the
# first bytecode at `pc`.
function(:ExecuteImplFast,
         params: { 'tr' => 'ptr', 'pc' => 'ptr', 'frame' => 'ptr', 'dispatch_table' => 'ptr' },
         regmap: handler_regmap,
         regalloc_set: $panda_mask,
         mode: [:InterpreterEntry],
         validate: InterpreterValidation) do
  # Arm32 is not supported
  if Options.arch == :arm32
    Intrinsic(:UNREACHABLE).Terminator.void
    next
  end
  # Setup registers according to internal interpreter calling convention:
  LiveOut(tr).DstReg(regmap[:tr]).ptr
  LiveOut(frame).DstReg(regmap[:frame]).ptr
  if Options.arm64?
    # arm64 additionally pins moffset and method_ptr in dedicated registers.
    moffset := get_moffset_frame(frame)
    method_ptr := get_method_ptr_frame(frame)
    LiveOut(moffset).DstReg(regmap[:moffset]).word
    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
  end

  # To prevent falling during frame verification, while acc is not initialized
  acc := LoadI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
  acc_tag := LoadI(AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64

  LiveOut(acc).DstReg(regmap[:acc]).ptr
  # NOTE(review): acc_tag is loaded as i64 above but LiveOut'd as ptr —
  # confirm this width mismatch is intentional.
  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr

  dispatch(dispatch_table, pc)
end
2123
# Exception-handling interpreter entry point. Performs the same fixed-register
# setup as ExecuteImplFast, but instead of dispatching on the opcode at `pc`
# it tail-calls the extra handler stored in the last slot of the dispatch
# table (index == number of regular handlers).
function(:ExecuteImplFastEH,
         params: { 'tr' => 'ptr', 'pc' => 'ptr', 'frame' => 'ptr', 'dispatch_table' => 'ptr' },
         regmap: handler_regmap,
         regalloc_set: $panda_mask,
         mode: [:InterpreterEntry],
         validate: InterpreterValidation) do
  # Arm32 is not supported
  if Options.arch == :arm32
    Intrinsic(:UNREACHABLE).Terminator.void
    next
  end
  # Setup registers according to internal interpreter calling convention:
  LiveOut(tr).DstReg(regmap[:tr]).ptr
  LiveOut(frame).DstReg(regmap[:frame]).ptr
  if Options.arm64?
    # arm64 additionally pins moffset and method_ptr in dedicated registers.
    moffset := get_moffset_frame(frame)
    method_ptr := get_method_ptr_frame(frame)
    LiveOut(moffset).DstReg(regmap[:moffset]).word
    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
  end

  # To prevent falling during frame verification, while acc is not initialized
  acc := LoadI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
  acc_tag := LoadI(AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64

  LiveOut(acc).DstReg(regmap[:acc]).ptr
  # NOTE(review): acc_tag is loaded as i64 above but LiveOut'd as ptr —
  # confirm this width mismatch is intentional (same pattern as ExecuteImplFast).
  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr

  # pc and the dispatch table must also be live in their fixed registers
  # for the handler we are about to tail-call.
  LiveOut(pc).DstReg(regmap[:pc]).ptr
  LiveOut(dispatch_table).DstReg(regmap[:dispatch]).ptr
  # The EH handler lives one slot past the regular handlers (8 bytes each).
  addr := Load(dispatch_table, Panda::dispatch_table.handler_names.size * 8).ptr
  tail_call(addr)
end
2157
2158Panda.instructions.each do |i|
2159  op = i.operands # alias for brevity
2160  mode = [:Interpreter]
2161  mode.push(:DynamicMethod, :DynamicStub) if i.properties.include?('dynamic')
2162  lang =  i.namespace == 'core' ? 'PANDA_ASSEMBLY' : i.namespace.upcase
2163
2164  # Remove profile part from the handler name, thereby we avoid adjusting of handler names each time we add profile
2165  # info for an instruction.
2166  handler_name = i.handler_name.gsub(/_PROF\d+/, '')
2167
2168  function("HANDLE_FAST_#{handler_name}",
2169           regmap: handler_regmap,
2170           regalloc_set: $panda_mask,
2171           mode: mode,
2172           lang: lang,
2173           validate: InterpreterValidation) do
2174    # Arm32 is not supported
2175    if Options.arch == :arm32
2176      Intrinsic(:UNREACHABLE).Terminator.void
2177      next
2178    end
2179    src_acc_type = i.acc_and_operands.select(&:src?).select(&:acc?).first&.type&.to_sym
2180    acc_type_map = {
2181      :b32 => :u32,
2182      :b64 => :u64,
2183      :u1 => :u32,
2184      :u8 => :u32,
2185      :u16 => :u32,
2186      :i8 => :i32,
2187      :i16 => :i32,
2188      :any => :u64,
2189      :top => :ptr
2190    }
2191    storage_type_map = {
2192      :f32 => :u32,
2193      :f64 => :u64,
2194      :b32 => :u32,
2195      :b64 => :u64,
2196      :u1 => :u8,
2197      :any => :u64,
2198      :top => :ptr
2199    }
2200    acc_src_storage_type = storage_type_map[src_acc_type] || src_acc_type || :ptr
2201    if i.properties.include?('dynamic')  # investigate and remove this if-clause
2202      save_acc().send(acc_src_storage_type)
2203    end
2204    if defines.DEBUG
2205      call_runtime("DebugPrintEntrypoint", %frame, %pc, %acc, %acc_tag).void
2206    end
2207    if src_acc_type == :f32
2208      acc := Bitcast(%acc.u32).SrcType("DataType::UINT32").f32
2209    elsif src_acc_type == :f64
2210      acc := Bitcast(%acc.u64).SrcType("DataType::UINT64").f64
2211    else
2212      acc := %acc.send(acc_src_storage_type)
2213    end
2214
2215    acc_tag := (%acc_tag).sword
2216    pc := %pc
2217    table := %dispatch
2218    frame := %frame
2219    if Options.arm64?
2220      moffset := (%moffset).word
2221      method_ptr := %method_ptr
2222    end
2223    tr := %tr
2224
2225    if defines.DEBUG
2226      if !i.properties.include?('dynamic')
2227        i.acc_and_operands.each do |o|
2228          if o.dst? && !o.src?
2229            next
2230          end
2231          if o.acc?
2232            if o.type == "ref" || (o.type.include? "[]")
2233              assert_has_object_eq(acc_tag.u64)
2234            elsif ([o.type] & ['none', 'top', 'any']).empty?
2235              assert_has_object_ne(acc_tag.u64)
2236            end
2237          elsif o.reg?
2238            # No need check virtual register tag, in case mov.obj and deoptimized frame.
2239            #   newobj v1, #some_record#
2240            #      ...
2241            #   mov.obj v2, v1
2242            #   mov.obj v2, v3
2243            # Object in v1 below first "mov.obj" is dead (can be deleted, because isn't used anywhere in method below).
2244            # Instruction "mov" don't exist in compiler, object in v1 dead for compiler early, so isn't written in nearest SaveState above mov.
2245            # If deoptimization happen, value of register v1 will be incorrect in interpreter. Assert on tag obj in first mov.obj, which is written below this comment
2246            # will fail, but it doesn't matter, because object isn't used below.
2247
2248            if handler_name.start_with? "MOV_OBJ"
2249              frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word
2250              If(And(frame_flags, "Frame::IS_DEOPTIMIZED").word, 0).NE.Unlikely {
2251                Goto(:SkipCheck)
2252              }
2253            end
2254            if o.type == "ref" || (o.type.include? "[]")
2255              assert_has_object_eq(get_tag(vreg_ptr(o)))
2256            elsif ([o.type] & ['none', 'top', 'any']).empty?
2257              assert_has_object_ne(get_tag(vreg_ptr(o)))
2258            end
2259            Label(:SkipCheck)
2260          end
2261        end
2262      end
2263    end
2264
2265    case handler_name
2266    when "NOP"
2267    # mov
2268    when "MOVI_V4_IMM4", "MOVI_V8_IMM8"
2269      handle_movi(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
2270    when "MOVI_V8_IMM16"
2271      handle_movi(vreg_ptr(op[0]), i16toi32(as_imm(op[1])))
2272    when "MOVI_V8_IMM32"
2273      handle_movi(vreg_ptr(op[0]), as_imm(op[1]))
2274    when "MOVI_64_V8_IMM64"
2275      handle_movi_64(vreg_ptr(op[0]), as_imm(op[1]))
2276    when "MOV_V4_V4", "MOV_V8_V8", "MOV_V16_V16"
2277      handle_mov(vreg_ptr(op[0]), vreg_value(op[1]).u32)
2278    when "MOV_64_V4_V4", "MOV_64_V16_V16"
2279      handle_mov_64(vreg_ptr(op[0]), vreg_value(op[1]).u64)
2280    when "MOV_OBJ_V4_V4", "MOV_OBJ_V8_V8", "MOV_OBJ_V16_V16"
2281      handle_mov_obj(vreg_ptr(op[0]), vreg_value(op[1]).ref)
2282    when "MOV_NULL_V8"
2283      handle_mov_null_v8(vreg_ptr(op[0]))
2284    when "FMOVI_PREF_V8_IMM32"
2285      handle_fmovi_v8_imm(vreg_ptr(op[0]), as_imm(op[1]))
2286    when "FMOVI_64_V8_IMM64"
2287      handle_fmovi_64_v8_imm(vreg_ptr(op[0]), as_imm(op[1]).f64)
2288    # lda
2289    when "LDA_V8"
2290      handle_lda(vreg_value(op[0]).u32)
2291    when "LDA_64_V8"
2292      handle_lda_64(vreg_value(op[0]).u64)
2293    when "LDA_OBJ_V8"
2294      handle_lda_obj_v8(vreg_value(op[0]).ref)
2295    when "LDA_STR_ID32"
2296      handle_lda_str_id32(as_id(op[0]))
2297    when "LDA_TYPE_ID16"
2298      handle_lda_type_id16(as_id(op[0]))
2299    when "LDA_CONST_V8_ID16"
2300      handle_lda_const_v8_id16(vreg_ptr(op[0]), as_id(op[1]))
2301    when "LDAI_IMM8"
2302      handle_ldai_imm(i8toi32(as_imm(op[0])))
2303    when "LDAI_IMM16"
2304      handle_ldai_imm(i16toi32(as_imm(op[0])))
2305    when "LDAI_IMM32"
2306      handle_ldai_imm(as_imm(op[0]))
2307    when "LDAI_64_IMM64"
2308      handle_ldai_64_imm(as_imm(op[0]))
2309    when "FLDAI_PREF_IMM32"
2310      handle_fldai_imm(as_imm(op[0]))
2311    when "FLDAI_64_IMM64"
2312      handle_fldai_64_imm(as_imm(op[0]))
2313    when "LDA_NULL"
2314      handle_lda_null()
2315    when "LENARR_V8"
2316      handle_lenarr_v8(vreg_value(op[0]).ref)
2317    when "LDARR_V8"
2318      handle_ldarr_v8(vreg_value(op[0]).ref)
2319    when "LDARR_8_V8"
2320      handle_ldarr_8_v8(vreg_value(op[0]).ref)
2321    when "LDARR_16_V8"
2322      handle_ldarr_16_v8(vreg_value(op[0]).ref)
2323    when "LDARRU_8_V8"
2324      handle_ldarru_8_v8(vreg_value(op[0]).ref)
2325    when "LDARRU_16_V8"
2326      handle_ldarru_16_v8(vreg_value(op[0]).ref)
2327    when "LDARR_64_V8"
2328      handle_ldarr_64_v8(vreg_value(op[0]).ref)
2329    when "FLDARR_32_V8"
2330      handle_fldarr_32_v8(vreg_value(op[0]).ref)
2331    when "FLDARR_64_V8"
2332      handle_fldarr_64_v8(vreg_value(op[0]).ref)
2333    when "LDARR_OBJ_V8"
2334      handle_ldarr_obj_v8(vreg_value(op[0]).ref)
2335    when "LDOBJ_V8_ID16"
2336      handle_ldobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2337    when "LDOBJ_V_V4_V4_ID16"
2338      handle_ldobj_v_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2339    when "LDOBJ_64_V8_ID16"
2340      handle_ldobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2341    when "LDOBJ_V_64_V4_V4_ID16"
2342      handle_ldobj_v_64_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2343    when "LDOBJ_OBJ_V8_ID16"
2344      handle_ldobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2345    when "LDOBJ_V_OBJ_V4_V4_ID16"
2346      handle_ldobj_v_obj_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2347    when "LDSTATIC_ID16"
2348      handle_ldstatic_id16(as_id(op[0]))
2349    when "LDSTATIC_64_ID16"
2350      handle_ldstatic_64_id16(as_id(op[0]))
2351    when "LDSTATIC_OBJ_ID16"
2352      handle_ldstatic_obj_id16(as_id(op[0]))
2353    # sta
2354    when "STA_V8"
2355      handle_sta_v8(vreg_ptr(op[0]))
2356    when "STA_64_V8"
2357      handle_sta_64_v8(vreg_ptr(op[0]))
2358    when "STA_OBJ_V8"
2359      handle_sta_obj_v8(vreg_ptr(op[0]))
2360    when "STARR_V4_V4"
2361      handle_starr_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2362    when "STARR_8_V4_V4"
2363      handle_starr_8_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2364    when "STARR_16_V4_V4"
2365      handle_starr_16_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2366    when "STARR_64_V4_V4"
2367      handle_starr_64_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2368    when "FSTARR_32_V4_V4"
2369      handle_fstarr_32_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2370    when "FSTARR_64_V4_V4"
2371      handle_fstarr_64_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2372    when "STARR_OBJ_V4_V4"
2373      handle_starr_obj_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2374    when "STOBJ_V8_ID16"
2375      handle_stobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2376    when "STOBJ_64_V8_ID16"
2377      handle_stobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2378    when "STOBJ_OBJ_V8_ID16"
2379      handle_stobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2380    when "STOBJ_V_V4_V4_ID16"
2381      handle_stobj_v_v4_v4_id16(vreg_value(op[0]).u32, vreg_value(op[1]).ref, as_id(op[2]))
2382    when "STOBJ_V_64_V4_V4_ID16"
2383      handle_stobj_v_64_v4_v4_id16(vreg_value(op[0]).u64, vreg_value(op[1]).ref, as_id(op[2]))
2384    when "STOBJ_V_OBJ_V4_V4_ID16"
2385      handle_stobj_v_obj_v4_v4_id16(vreg_value(op[0]).ref, vreg_value(op[1]).ref, as_id(op[2]))
2386    when "STSTATIC_ID16"
2387      handle_ststatic_id16(as_id(op[0]))
2388    when "STSTATIC_64_ID16"
2389      handle_ststatic_64_id16(as_id(op[0]))
2390    when "STSTATIC_OBJ_ID16"
2391      handle_ststatic_obj_id16(as_id(op[0]))
2392    # jmp
2393    when "JMP_IMM8"
2394      pc := handle_jmp_imm(pc, i8toi32(as_imm(op[0])))
2395    when "JMP_IMM16"
2396      pc := handle_jmp_imm(pc, i16toi32(as_imm(op[0])))
2397    when "JMP_IMM32"
2398      pc := handle_jmp_imm(pc, as_imm(op[0]))
2399    # conditional jumps
2400    # NB! Better not to load jump offset when condition is false
2401    when "JEQ_V8_IMM8"
2402      pc := handle_jeq_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2403    when "JEQ_V8_IMM16"
2404      pc := handle_jeq_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2405    when "JNE_V8_IMM8"
2406      pc := handle_jne_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2407    when "JNE_V8_IMM16"
2408      pc := handle_jne_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2409    when "JLT_V8_IMM8"
2410      pc := handle_jlt_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2411    when "JLT_V8_IMM16"
2412      pc := handle_jlt_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2413    when "JGT_V8_IMM8"
2414      pc := handle_jgt_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2415    when "JGT_V8_IMM16"
2416      pc := handle_jgt_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2417    when "JLE_V8_IMM8"
2418      pc := handle_jle_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2419    when "JLE_V8_IMM16"
2420      pc := handle_jle_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2421    when "JGE_V8_IMM8"
2422      pc := handle_jge_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2423    when "JGE_V8_IMM16"
2424      pc := handle_jge_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2425    when "JEQZ_IMM8"
2426      pc := handle_jeqz_imm8(pc, as_imm(op[0]), i.format.size)
2427    when "JEQZ_IMM16"
2428      pc := handle_jeqz_imm16(pc, as_imm(op[0]), i.format.size)
2429    when "JNEZ_IMM8"
2430      pc := handle_jnez_imm8(pc, as_imm(op[0]), i.format.size)
2431    when "JNEZ_IMM16"
2432      pc := handle_jnez_imm16(pc, as_imm(op[0]), i.format.size)
2433    when "JLTZ_IMM8"
2434      pc := handle_jltz_imm8(pc, as_imm(op[0]), i.format.size)
2435    when "JLTZ_IMM16"
2436      pc := handle_jltz_imm16(pc, as_imm(op[0]), i.format.size)
2437    when "JGTZ_IMM8"
2438      pc := handle_jgtz_imm8(pc, as_imm(op[0]), i.format.size)
2439    when "JGTZ_IMM16"
2440      pc := handle_jgtz_imm16(pc, as_imm(op[0]), i.format.size)
2441    when "JLEZ_IMM8"
2442      pc := handle_jlez_imm8(pc, as_imm(op[0]), i.format.size)
2443    when "JLEZ_IMM16"
2444      pc := handle_jlez_imm16(pc, as_imm(op[0]), i.format.size)
2445    when "JGEZ_IMM8"
2446      pc := handle_jgez_imm8(pc, as_imm(op[0]), i.format.size)
2447    when "JGEZ_IMM16"
2448      pc := handle_jgez_imm16(pc, as_imm(op[0]), i.format.size)
2449    when "JNEZ_OBJ_IMM8"
2450      pc := handle_jnez_obj_imm8(pc, as_imm(op[0]), i.format.size)
2451    when "JNEZ_OBJ_IMM16"
2452      pc := handle_jnez_obj_imm16(pc, as_imm(op[0]), i.format.size)
2453    when "JEQZ_OBJ_IMM8"
2454      pc := handle_jeqz_obj_imm8(pc, as_imm(op[0]), i.format.size)
2455    when "JEQZ_OBJ_IMM16"
2456      pc := handle_jeqz_obj_imm16(pc, as_imm(op[0]), i.format.size)
2457    when "JNE_OBJ_V8_IMM8"
2458      pc := handle_jne_obj_v8_imm8(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2459    when "JNE_OBJ_V8_IMM16"
2460      pc := handle_jne_obj_v8_imm16(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2461    when "JEQ_OBJ_V8_IMM8"
2462      pc := handle_jeq_obj_v8_imm8(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2463    when "JEQ_OBJ_V8_IMM16"
2464      pc := handle_jeq_obj_v8_imm16(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2465    # cmp
2466    when "FCMPG_PREF_V8"
2467      handle_fcmpg_v8(vreg_value(op[0]).f32)
2468    when "FCMPG_64_V8"
2469      handle_fcmpg_64_v8(vreg_value(op[0]).f64)
2470    when "FCMPL_PREF_V8"
2471      handle_fcmpl_v8(vreg_value(op[0]).f32)
2472    when "FCMPL_64_V8"
2473      handle_fcmpl_64_v8(vreg_value(op[0]).f64)
2474    when "UCMP_PREF_V8"
2475      handle_ucmp(acc.u32, vreg_value(op[0]).u32)
2476    when "UCMP_64_PREF_V8"
2477      handle_ucmp(acc.u64, vreg_value(op[0]).u64)
2478    when "CMP_64_V8"
2479      handle_cmp(acc.i64, vreg_value(op[0]).i64)
2480    # add
2481    when "ADD_V4_V4"
2482      handle_add_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2483    when "ADDV_V4_V4"
2484      handle_add_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2485    when "INCI_V4_IMM4"
2486      handle_inci_v4_imm4(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
2487    when "ADDI_IMM8"
2488      handle_addi_imm(i8toi32(as_imm(op[0])))
2489    when "ADDIV_V4_V4_IMM8"
2490      handle_addi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2491    when "ADD2_V8"
2492      handle_add2_v8(vreg_value(op[0]).i32)
2493    when "ADD2_64_V8"
2494      handle_add2_64_v8(vreg_value(op[0]).i64)
2495    when "FADD2_64_V8"
2496      handle_fadd2_64_v8(vreg_value(op[0]).f64)
2497    when "FADD2_PREF_V8"
2498      handle_fadd2_v8(vreg_value(op[0]).f32)
2499    when "ADD2V_V8_V8"
2500      handle_add2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2501    when "ADD2V_64_V8_V8"
2502      handle_add2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2503    when "FADD2V_64_V8_V8"
2504      handle_fadd2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2505    when "FADD2V_PREF_V8_V8"
2506      handle_fadd2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2507    # sub
2508    when "FSUB2_PREF_V8"
2509      handle_fsub2_v8(vreg_value(op[0]).f32)
2510    when "FSUB2V_PREF_V8_V8"
2511      handle_fsub2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2512    when "SUB_V4_V4"
2513      handle_sub_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2514    when "SUBV_V4_V4"
2515      handle_sub_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2516    when "SUB2_V8"
2517      handle_sub2_v8(vreg_value(op[0]).i32)
2518    when "SUB2_64_V8"
2519      handle_sub2_64_v8(vreg_value(op[0]).i64)
2520    when "SUB2V_V8_V8"
2521      handle_sub2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2522    when "SUB2V_64_V8_V8"
2523      handle_sub2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2524    when "SUBI_IMM8"
2525      handle_subi_imm(i8toi32(as_imm(op[0])))
2526    when "SUBIV_V4_V4_IMM8"
2527      handle_subi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2528    when "FSUB2_64_V8"
2529      handle_fsub2_64_v8(vreg_value(op[0]).f64)
2530    when "SUB2_V8"
2531      handle_sub2_v8(vreg_value(op[0]).i32)
2532    when "FSUB2_64_V8"
2533      handle_fsub2_64_v8(vreg_value(op[0]).f64)
2534    when "FSUB2V_64_V8_V8"
2535      handle_fsub2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2536    when "SUB2V_V8_V8"
2537      handle_sub2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2538    when "FSUB2V_64_V8_V8"
2539      handle_fsub2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2540    # mul
2541    when "MUL_V4_V4"
2542      handle_mul_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2543    when "MULV_V4_V4"
2544      handle_mul_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2545    when "MUL2_V8"
2546      handle_mul2_v8(vreg_value(op[0]).i32)
2547    when "FMUL2_PREF_V8"
2548      handle_fmul2_v8(vreg_value(op[0]).f32)
2549    when "MUL2_64_V8"
2550      handle_mul2_64_v8(vreg_value(op[0]).i64)
2551    when "MUL2V_V8_V8"
2552      handle_mul2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2553    when "FMUL2V_PREF_V8_V8"
2554      handle_fmul2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2555    when "MUL2V_64_V8_V8"
2556      handle_mul2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2557    when "MULI_IMM8"
2558      handle_muli_imm(i8toi32(as_imm(op[0])))
2559    when "MULIV_V4_V4_IMM8"
2560      handle_muli_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2561    when "FMUL2_64_V8"
2562      handle_fmul2_64_v8(vreg_value(op[0]).f64)
2563    when "FMUL2V_64_V8_V8"
2564      handle_fmul2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2565    # div
2566    when "FDIV2_PREF_V8"
2567      handle_fdiv2_v8(vreg_value(op[0]).f32)
2568    when "FDIV2_64_V8"
2569      handle_fdiv2_64_v8(vreg_value(op[0]).f64)
2570    when "FDIV2V_PREF_V8_V8"
2571      handle_fdiv2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2572    when "FDIV2V_64_V8_V8"
2573      handle_fdiv2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2574    when "DIV_V4_V4"
2575      handle_div_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2576    when "DIVV_V4_V4"
2577      handle_div_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2578    when "DIV2_V8"
2579      handle_div2_v8(vreg_value(op[0]).i32)
2580    when "DIV2V_V8_V8"
2581      handle_div2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2582    when "DIVI_IMM8"
2583      handle_divi_imm(i8toi32(as_imm(op[0])))
2584    when "DIVIV_V4_V4_IMM8"
2585      handle_divi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2586    when "DIV2_64_V8"
2587      handle_div2_64_v8(vreg_value(op[0]).i64)
2588    when "DIVU2_PREF_V8"
2589      handle_divu2_v8(vreg_value(op[0]).i32)
2590    when "DIVU2_64_PREF_V8"
2591      handle_divu2_64_v8(vreg_value(op[0]).i64)
2592    when "DIV2V_64_V8_V8"
2593      handle_div2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2594    when "DIVU2V_PREF_V8_V8"
2595      handle_divu2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2596    when "DIVU2V_64_PREF_V8_V8"
2597      handle_divu2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2598    # mod
2599    when "FMOD2_PREF_V8"
2600      handle_fmod2_v8(vreg_value(op[0]).f32)
2601    when "FMOD2_64_V8"
2602      handle_fmod2_64_v8(vreg_value(op[0]).f64)
2603    when "FMOD2V_PREF_V8_V8"
2604      handle_fmod2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2605    when "FMOD2V_64_V8_V8"
2606      handle_fmod2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2607    when "MOD_V4_V4"
2608      handle_mod_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2609    when "MODV_V4_V4"
2610      handle_mod_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2611    when "MOD2_V8"
2612      handle_mod2_v8(vreg_value(op[0]).i32)
2613    when "MOD2V_V8_V8"
2614      handle_mod2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2615    when "MODI_IMM8"
2616      handle_modi_imm(i8toi32(as_imm(op[0])))
2617    when "MODIV_V4_V4_IMM8"
2618      handle_modi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2619    when "MOD2_64_V8"
2620      handle_mod2_64_v8(vreg_value(op[0]).i64)
2621    when "MODU2_PREF_V8"
2622      handle_modu2_v8(vreg_value(op[0]).u32)
2623    when "MODU2_64_PREF_V8"
2624      handle_modu2_64_v8(vreg_value(op[0]).u64)
2625    when "MOD2V_64_V8_V8"
2626      handle_mod2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2627    when "MODU2V_PREF_V8_V8"
2628      handle_modu2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).u32)
2629    when "MODU2V_64_PREF_V8_V8"
2630      handle_modu2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).u64)
2631    # neg
2632    when "FNEG_64"
2633      handle_fneg_64()
2634    when "FNEG_PREF_NONE"
2635      handle_fneg()
2636    # and
2637    when "AND2_PREF_V8"
2638      handle_and2_v8(vreg_value(op[0]).i32)
2639    when "AND2_64_PREF_V8"
2640      handle_and2_64_v8(vreg_value(op[0]).i64)
2641    when "AND2V_PREF_V8_V8"
2642      handle_and2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2643    when "AND2V_64_PREF_V8_V8"
2644      handle_and2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2645    when "ANDI_IMM32"
2646      handle_andi_imm(as_imm(op[0]))
2647    when "ANDIV_V4_V4_IMM32"
2648      handle_andi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2649    when "AND_PREF_V4_V4"
2650      handle_and_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2651    when "ANDV_PREF_V4_V4"
2652      handle_and_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2653    # or
2654    when "OR2_PREF_V8"
2655      handle_or2_v8(vreg_value(op[0]).i32)
2656    when "OR2_64_PREF_V8"
2657      handle_or2_64_v8(vreg_value(op[0]).i64)
2658    when "OR2V_PREF_V8_V8"
2659      handle_or2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2660    when "OR2V_64_PREF_V8_V8"
2661      handle_or2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2662    when "ORI_IMM32"
2663      handle_ori_imm(as_imm(op[0]))
2664    when "ORIV_V4_V4_IMM32"
2665      handle_ori_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2666    when "OR_PREF_V4_V4"
2667      handle_or_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2668    when "ORV_PREF_V4_V4"
2669      handle_or_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2670    # ashr
2671    when "ASHR2_PREF_V8"
2672      handle_ashr2_v8(vreg_value(op[0]).i32)
2673    when "ASHR2_64_PREF_V8"
2674      handle_ashr2_64_v8(vreg_value(op[0]).i64)
2675    when "ASHR2V_PREF_V8_V8"
2676      handle_ashr2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2677    when "ASHR2V_64_PREF_V8_V8"
2678      handle_ashr2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2679    when "ASHRI_IMM8"
2680      handle_ashri_imm(as_imm(op[0]))
2681    when "ASHRIV_V4_V4_IMM8"
2682      handle_ashri_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2683    when "ASHR_PREF_V4_V4"
2684      handle_ashr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2685    when "ASHRV_PREF_V4_V4"
2686      handle_ashr_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2687    # shr
2688    when "SHRI_IMM8"
2689      handle_shri_imm(i8toi32(as_imm(op[0])))
2690    when "SHRIV_V4_V4_IMM8"
2691      handle_shri_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2692    when "SHR_PREF_V4_V4"
2693      handle_shr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2694    when "SHRV_PREF_V4_V4"
2695      handle_shr_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2696    when "SHR2_PREF_V8"
2697      handle_shr2_v8(vreg_value(op[0]).i32)
2698    when "SHR2_64_PREF_V8"
2699      handle_shr2_64_v8(vreg_value(op[0]).i64)
2700    when "SHR2V_PREF_V8_V8"
2701      handle_shr2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2702    when "SHR2V_64_PREF_V8_V8"
2703      handle_shr2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2704    # xor
2705    when "XOR2_PREF_V8"
2706      handle_xor2_v8(vreg_value(op[0]).i32)
2707    when "XOR2_64_PREF_V8"
2708      handle_xor2_64_v8(vreg_value(op[0]).i64)
2709    when "XOR2V_PREF_V8_V8"
2710      handle_xor2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2711    when "XOR2V_64_PREF_V8_V8"
2712      handle_xor2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2713    when "XORI_PREF_IMM32"
2714      handle_xori_imm(as_imm(op[0]))
2715    when "XORIV_PREF_V4_V4_IMM32"
2716      handle_xori_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2717    when "XOR_PREF_V4_V4"
2718      handle_xor_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2719    when "XORV_PREF_V4_V4"
2720      handle_xor_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2721    # shl
2722    when "SHLI_IMM8"
2723      handle_shli_imm(i8toi32(as_imm(op[0])))
2724    when "SHLIV_V4_V4_IMM8"
2725      handle_shli_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2726    when "SHL_PREF_V4_V4"
2727      handle_shl_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2728    when "SHLV_PREF_V4_V4"
2729      handle_shl_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2730    when "SHL2_PREF_V8"
2731      handle_shl2_v8(vreg_value(op[0]).i32)
2732    when "SHL2_64_PREF_V8"
2733      handle_shl2_64_v8(vreg_value(op[0]).i64)
2734    when "SHL2V_PREF_V8_V8"
2735      handle_shl2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2736    when "SHL2V_64_PREF_V8_V8"
2737      handle_shl2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2738    # not
2739    when "NOT_PREF_NONE"
2740      handle_not()
2741    when "NOT_64_PREF_NONE"
2742      handle_not_64()
2743    # neg
2744    when "NEG"
2745      handle_neg()
2746    when "NEG_64"
2747      handle_neg_64()
2748    # new
2749    when "NEWARR_V4_V4_ID16"
2750      handle_newarr_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_id(op[2]))
2751    when "NEWOBJ_V8_ID16"
2752      handle_newobj_v8_id16(vreg_ptr(op[0]), as_id(op[1]))
2753    # checks
2754    when "ISINSTANCE_ID16"
2755      handle_isinstance_id16(as_id(op[0]))
2756    when "CHECKCAST_ID16"
2757      handle_checkcast_id16(as_id(op[0]))
2758    # cast
2759    when "I32TOU1_PREF_NONE"
2760      handle_i32tou1()
2761    when "I64TOU1_PREF_NONE"
2762      handle_i64tou1()
2763    when "U32TOU1_PREF_NONE"
2764      handle_u32tou1()
2765    when "U64TOU1_PREF_NONE"
2766      handle_u64tou1()
2767    when "I32TOI64_PREF_NONE"
2768      handle_i32toi64()
2769    when "I32TOI16_PREF_NONE"
2770      handle_i32toi16()
2771    when "I32TOU16_PREF_NONE"
2772      handle_i32tou16()
2773    when "I32TOI8_PREF_NONE"
2774      handle_i32toi8()
2775    when "I32TOU8_PREF_NONE"
2776      handle_i32tou8()
2777    when "I64TOI32_PREF_NONE"
2778      handle_i64toi32()
2779    when "U32TOI64_PREF_NONE"
2780      handle_u32toi64()
2781    when "U32TOI16_PREF_NONE"
2782      handle_u32toi16()
2783    when "U32TOU16_PREF_NONE"
2784      handle_u32tou16()
2785    when "U32TOI8_PREF_NONE"
2786      handle_u32toi8()
2787    when "U32TOU8_PREF_NONE"
2788      handle_u32tou8()
2789    when "U64TOI32_PREF_NONE"
2790      handle_u64toi32()
2791    when "U64TOU32_PREF_NONE"
2792      handle_u64tou32()
2793    when "I32TOF32_PREF_NONE"
2794      handle_i32tof32()
2795    when "I32TOF64_PREF_NONE"
2796      handle_i32tof64()
2797    when "U32TOF32_PREF_NONE"
2798      handle_u32tof32()
2799    when "U32TOF64_PREF_NONE"
2800      handle_u32tof64()
2801    when "I64TOF32_PREF_NONE"
2802      handle_i64tof32()
2803    when "I64TOF64_PREF_NONE"
2804      handle_i64tof64()
2805    when "U64TOF32_PREF_NONE"
2806      handle_u64tof32()
2807    when "U64TOF64_PREF_NONE"
2808      handle_u64tof64()
2809    when "F32TOF64_PREF_NONE"
2810      handle_f32tof64()
2811    when "F32TOI32_PREF_NONE"
2812      handle_f32toi32()
2813    when "F32TOI64_PREF_NONE"
2814      handle_f32toi64()
2815    when "F32TOU32_PREF_NONE"
2816      handle_f32tou32()
2817    when "F32TOU64_PREF_NONE"
2818      handle_f32tou64()
2819    when "F64TOI32_PREF_NONE"
2820      handle_f64toi32()
2821    when "F64TOI64_PREF_NONE"
2822      handle_f64toi64()
2823    when "F64TOU32_PREF_NONE"
2824      handle_f64tou32()
2825    when "F64TOU64_PREF_NONE"
2826      handle_f64tou64()
2827    when "F64TOF32_PREF_NONE"
2828      handle_f64tof32()
2829    # call
2830    when "CALL_SHORT_V4_V4_ID16"
2831      handle_call_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2832    when "CALL_ACC_SHORT_V4_IMM4_ID16"
2833      handle_call_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
2834    when "CALL_ACC_V4_V4_V4_IMM4_ID16"
2835      handle_call_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
2836    when "CALL_V4_V4_V4_V4_ID16"
2837      handle_call_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2838    when "CALL_RANGE_V8_ID16"
2839      handle_call_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2840    when "CALL_VIRT_SHORT_V4_V4_ID16"
2841      handle_call_virt_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2842    when "CALL_VIRT_ACC_SHORT_V4_IMM4_ID16"
2843      handle_call_virt_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
2844    when "CALL_VIRT_V4_V4_V4_V4_ID16"
2845      handle_call_virt_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2846    when "CALL_VIRT_ACC_V4_V4_V4_IMM4_ID16"
2847      handle_call_virt_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
2848    when "CALL_VIRT_RANGE_V8_ID16"
2849      handle_call_virt_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2850    when "INITOBJ_SHORT_V4_V4_ID16"
2851      handle_initobj_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2852    when "INITOBJ_V4_V4_V4_V4_ID16"
2853      handle_initobj_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2854    when "INITOBJ_RANGE_V8_ID16"
2855      handle_initobj_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2856    # return
2857    when "RETURN_VOID"
2858      handle_return_void()
2859    when "RETURN"
2860      handle_return()
2861    when "RETURN_64"
2862      handle_return_64()
2863    when "RETURN_OBJ"
2864      handle_return_obj()
2865    # dyn
2866    when "MOV_DYN_V8_V8"
2867      set_value(vreg_ptr(op[0]), vreg_value(op[1]).any).any
2868    when "STA_DYN_V8"
2869      set_value(vreg_ptr(op[0]), acc.any).any
2870    when "LDA_DYN_V8"
2871      acc := vreg_value(op[0]).any
2872    when "LDAI_DYN_IMM32"
2873      acc := i32toany(as_imm(op[0]).i32)
2874    when "FLDAI_DYN_IMM64"
2875      acc := f64toany(as_imm(op[0]).f64)
2876    # throw
2877    when "THROW_V8"
2878      handle_throw(vreg_value(op[0]).ref)
2879
2880include_plugin 'interpreter_main_loop'
2881
2882    else
2883      Intrinsic(:UNREACHABLE).Terminator.void
2884    end
2885
2886    if (i.properties & ['jump', 'call', 'return']).empty?
2887      if !i.exceptions.include?('x_throw')
2888        if i.exceptions.include?('x_ecma')
2889          If(exception_val(), 0).NE.Unlikely {
2890            pc_eh := find_catch_block()
2891          } Else {
2892            pc_inc := advance_pc_imm(pc, i.format.size)
2893          }
2894          frame := Phi(frame_eh, frame).ptr
2895          if Options.arm64?
2896            moffset := Phi(moffset_eh, moffset).word
2897            method_ptr := Phi(method_ptr_eh, method_ptr).ptr
2898          end
2899          pc := Phi(pc_eh, pc_inc).ptr
2900          acc := Phi(acc_eh.any, acc.any).any
2901        else
2902          pc := advance_pc_imm(pc, i.format.size)
2903        end
2904      end
2905    end
2906
2907    dst_acc_type = i.acc_and_operands.select(&:dst?).select(&:acc?).first&.type&.to_sym
2908    src_acc_type = i.acc_and_operands.select(&:src?).select(&:acc?).first&.type&.to_sym
2909
2910    acc_type = dst_acc_type || src_acc_type || :u64
2911
2912    acc_type = acc_type_map[acc_type] || acc_type
2913    if acc_type == :f32
2914      acc := Bitcast(acc.f32).SrcType("DataType::FLOAT32").u32
2915      acc_type = :u32
2916    elsif acc_type == :f64
2917      acc := Bitcast(acc.f64).SrcType("DataType::FLOAT64").u64
2918      acc_type = :u64
2919    end
2920    LiveOut(acc).DstReg(regmap[:acc]).send(acc_type)
2921    LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr  # actually u64 but let's correspond to LiveIn's type
2922    LiveOut(frame).DstReg(regmap[:frame]).ptr
2923    if Options.arm64?
2924      LiveOut(moffset).DstReg(regmap[:moffset]).word
2925      LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
2926    end
2927    LiveOut(tr).DstReg(regmap[:tr]).ptr
2928
2929    dispatch(table, pc)
2930  end
2931end
2932
# Generate a first-level dispatch trampoline for every opcode prefix.
# Each generated handler reads the secondary (prefixed) opcode byte that
# follows the prefix byte, computes the address of the corresponding
# second-level handler in the dispatch table, and tail-calls it while
# keeping all interpreter state live in the fixed registers.
Panda.prefixes.each do |p|
  function("HANDLE_FAST_#{p.handler_name}",
           regmap: handler_regmap,
           regalloc_set: $panda_mask,
           mode: [:Interpreter],
           validate: InterpreterValidation) do
    # Arm32 is not supported
    if Options.arch == :arm32
      Intrinsic(:UNREACHABLE).Terminator.void
      next
    end
    pc := %pc
    table := %dispatch

    # The secondary opcode is the byte immediately after the prefix byte.
    secondary_opcode := readbyte(pc, 1)
    # Dispatch-table index: secondary opcode biased by this prefix group's
    # offset within the table, then scaled by the machine word size.
    offset_idx := AddI(u8toword(secondary_opcode)).Imm(Panda.dispatch_table.secondary_opcode_offset(p)).word
    offset := Mul(offset_idx, "WordSize()").word
    addr := Load(table, offset).ptr

    # Pass the interpreter state through to the next handler in the fixed
    # registers declared in handler_regmap.
    LiveOut(%acc).DstReg(regmap[:acc]).u64
    LiveOut(%acc_tag).DstReg(regmap[:acc_tag]).u64
    LiveOut(pc).DstReg(regmap[:pc]).ptr
    LiveOut(table).DstReg(regmap[:dispatch]).ptr
    LiveOut(%frame).DstReg(regmap[:frame]).ptr
    if Options.arm64?
      # arm64 has enough registers for the extra moffset/method_ptr state.
      LiveOut(%moffset).DstReg(regmap[:moffset]).word
      LiveOut(%method_ptr).DstReg(regmap[:method_ptr]).ptr
    end
    LiveOut(%tr).DstReg(regmap[:tr]).ptr

    tail_call(addr)
  end
end
2966
# Handler for invalid/undefined opcodes: reaching it indicates a corrupted
# bytecode stream or an interpreter bug, so it terminates with the
# UNREACHABLE intrinsic instead of attempting recovery.
function(:HANDLE_FAST_INVALID,
         regmap: handler_regmap,
         regalloc_set: $panda_mask,
         mode: [:Interpreter],
         validate: InterpreterValidation) do
  Intrinsic(:UNREACHABLE).Terminator.void
end
2974
# Exception dispatch handler: routes a pending exception to its catch block.
# It locates the catch block's pc, restores the exception-handling frame
# (plus moffset/method_ptr on arm64), reloads the accumulator and its tag
# from the exception-handling state, and resumes dispatch at the catch pc.
function(:HANDLE_FAST_EXCEPTION,
         regmap: handler_regmap,
         regalloc_set: $panda_mask,
         mode: [:Interpreter],
         validate: InterpreterValidation) do
  # Arm32 is not supported
  if Options.arch == :arm32
    Intrinsic(:UNREACHABLE).Terminator.void
    next
  end
  table := %dispatch
  pc := %pc

  # Precondition: a pending exception is set when this handler is entered.
  pc := find_catch_block()
  frame := frame_eh
  if Options.arm64?
    # Restore the arm64-only state from the exception-handling snapshot.
    moffset := moffset_eh
    method_ptr := method_ptr_eh
  end
  load_to_acc_reg(acc_eh, acc_tag_eh)

  # Hand the restored interpreter state to the catch-block handler via the
  # fixed registers declared in handler_regmap.
  LiveOut(acc).DstReg(regmap[:acc]).u64
  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).u64
  LiveOut(frame).DstReg(regmap[:frame]).ptr
  if Options.arm64?
    LiveOut(moffset).DstReg(regmap[:moffset]).word
    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
  end
  LiveOut(%tr).DstReg(regmap[:tr]).ptr

  dispatch(table, pc)
end
3008