• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env ruby
2
3# Copyright (c) 2021-2024 Huawei Device Co., Ltd.
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16include_relative 'common.irt'
17
18fixed_regmap = Regmap.new({
19  arm32: { dispatch: 12, pc: 4, frame: 8, acc: 7 },
20  arm64: { dispatch: 24, pc: 20, frame: 23, acc: 21, acc_tag: 22, moffset: 25, method_ptr: 26 },
21  x86_64: { dispatch: 8, pc: 4, frame: 5, acc: 11, acc_tag: 3 },
22})
23handler_regmap = $full_regmap + fixed_regmap
24
25def check_regmap(lhs, rhs, name)
26  regs_intersection = lhs.data.values & rhs.data.values
27  raise "Fixed register numbers should not intersect with '#{name}' registers" unless regs_intersection.empty?
28end
29
30if Options.arm64?  # other archs have no enough regs
31  # fixed registers assignment sanity checks:
32  check_regmap(fixed_regmap, $panda_regmap, 'panda')
33  check_regmap(fixed_regmap, $arch_regmap, 'arch')
34  check_regmap(fixed_regmap, $args_regmap, 'args')
35  check_regmap(fixed_regmap, $callers_regmap, 'caller')
36end
37
38InterpreterValidation = {
39  spills_count_max: 32  # should be synced with SPILL_SLOTS in codegen_interpreter.h
40}
41
42# Macros:
43
44# Casts:
45
46['8', '16'].each do |from|
47  ['u32', 'u64'].each do |to|
48    macro(:"u#{from}to#{to}") do |arg|
49      Cast(arg).SrcType("DataType::UINT#{from}").send(to)
50    end
51  end
52end
53
54['8', '16'].each do |from|
55  macro(:"i#{from}toi32") do |arg|
56    Cast(arg).SrcType("DataType::INT#{from}").i32
57  end
58end
59
60['8', '16'].each do |from|
61  macro(:"i#{from}toi64") do |arg|
62    Cast(arg).SrcType("DataType::INT#{from}").i64
63  end
64end
65
66
67[['u32', 'UINT32'], ['i32', 'INT32']].each do |from, from_type|
68  ['b', 'i8', 'u8', 'i16', 'u16', 'i64', 'u64'].each do |to|
69    macro(:"#{from}to#{to}") do |arg|
70      Cast(arg).SrcType("DataType::#{from_type}").send(to)
71    end
72  end
73end
74
75['b', 'u32', 'i32', 'u8', 'i8', 'i16', 'u16', 'i64'].each do |to|
76  macro(:"u64to#{to}") do |arg|
77    Cast(arg).SrcType("DataType::UINT64").send(to)
78  end
79end
80
81['b', 'i32'].each do |to|
82  macro(:"i64to#{to}") do |arg|
83    Cast(arg).SrcType("DataType::INT64").send(to)
84  end
85end
86
87macro(:"btou32") do |arg|
88    Cast(arg).SrcType("DataType::BOOL").send('u32')
89end
90
91macro(:"btou8") do |arg|
92  Cast(arg).SrcType("DataType::BOOL").send('u8')
93end
94
95[['u32', 'UINT32'], ['i32', 'INT32'], ['u64', 'UINT64'], ['i64', 'INT64']].each do |from, from_type|
96  ['f32', 'f64'].each do |to|
97    macro(:"#{from}to#{to}") do |arg|
98      Cast(arg).SrcType("DataType::#{from_type}").send(to)
99    end
100  end
101end
102
103['f64', 'i32', 'u32', 'i64', 'u64'].each do |to|
104  macro(:"f32to#{to}") do |arg|
105    Cast(arg).SrcType("DataType::FLOAT32").send(to)
106  end
107end
108
109['i32', 'u32', 'i64', 'u64', 'f32'].each do |to|
110  macro(:"f64to#{to}") do |arg|
111    Cast(arg).SrcType("DataType::FLOAT64").send(to)
112  end
113end
114
115['i32', 'i64', 'u32', 'u64'].each do |from|
116  macro(:"#{from}tou1") do |arg|
117    res0 := 0
118    If(arg, 0).NE do
119      res1 := 1
120    end
121    Phi(res0, res1).i32
122  end
123end
124
125['u8', 'u16'].each do |from|
126  macro(:"#{from}toword") do |arg|
127    if Options.arch_64_bits?
128      send(:"#{from}tou64", arg)
129    else
130      send(:"#{from}tou32", arg)
131    end
132  end
133end
134
135macro(:u32toword) do |arg|
136  if Options.arch_64_bits?
137    u32tou64(arg)
138  else
139    arg
140  end
141end
142
143macro(:bitcast_to_ref) do |value|
144  set_no_hoist_flag(Bitcast(value).SrcType(Constants::REF_UINT).ref)
145end
146
147# to be redefined in plugins
148macro(:i32toany) do |arg|
149  OrI(i32toi64(arg)).Imm("ark::coretypes::TaggedValue::TAG_INT").i64
150end
151
152macro(:f64toany) do |arg|
153  CastValueToAnyType(arg).AnyType(Constants::DYN_UNDEFINED).any
154end
155
156# Decoding
157
158macro(:readbyte) do |pc, offset|
159  LoadI(pc).Imm(offset).u8
160end
161
162macro(:read_lower_4bits) do |offset|
163  if Options.arm64?
164    imm := readbyte(pc, offset).u32
165    AndI(imm).Imm(0xf).u8
166  else
167    imm := readbyte(pc, offset).u8
168    AndI(imm).Imm(0xf).u8
169  end
170end
171
172macro(:signed_read_higher_4bits) do |offset|
173  if Options.arm64?
174    imm := readbyte(pc, offset).i32
175    shl_imm := ShlI(imm).Imm(24).i32
176    i32toi8(AShrI(shl_imm).Imm(28).i32)
177  else
178    imm := readbyte(pc, offset).i8
179    AShrI(imm).Imm(4).i8
180  end
181end
182
183macro(:read_higher_4bits) do |offset|
184  if Options.arm64?
185    imm := readbyte(pc, offset).u32
186    shl_imm := ShlI(imm).Imm(24).u32
187    u32tou8(ShrI(shl_imm).Imm(28).u32)
188  else
189    imm := readbyte(pc, offset).u8
190    ShrI(imm).Imm(4).u8
191  end
192end
193
194macro(:as_vreg_idx) do |operand|
195  raise 'Register is expected' unless operand.reg?
196
197  offset = operand.offset / 8
198  case operand.width
199  when 4
200    u8toword(operand.offset % 8 != 0 ? read_higher_4bits(offset) : read_lower_4bits(offset))
201  when 8
202    u8toword(readbyte(pc, offset))
203  when 16
204    u16toword(readbyte(pc, offset).u16)
205  end
206end
207
208macro(:as_id) do |operand|
209  raise 'ID is expected' unless operand.id?
210
211  offset = operand.offset / 8
212
213  case operand.width
214  when 16
215    readbyte(pc, offset).u16
216  when 32
217    readbyte(pc, offset).u32
218  end
219end
220
221macro(:as_imm) do |operand|
222  raise 'Immediate is expected' unless operand.imm?
223
224  offset = operand.offset / 8
225
226  case operand.width
227  when 4
228    operand.offset % 8 != 0 ? signed_read_higher_4bits(offset) : read_lower_4bits(offset)
229  when 8
230    readbyte(pc, offset).i8
231  when 16
232    readbyte(pc, offset).i16
233  when 32
234    if operand.type == 'f32'
235      readbyte(pc,offset).f32
236    else
237      readbyte(pc, offset).i32
238    end
239  when 64
240    if operand.type == 'f64'
241      readbyte(pc, offset).f64
242    else
243      readbyte(pc, offset).i64
244    end
245  end
246end
247
248macro(:ins_offset) do
249  instructions_offset := LoadI(%frame).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
250  Sub(%pc, instructions_offset).word
251end
252
253macro(:update_bytecode_offset) do
254  StoreI(%frame, ins_offset).Imm(Constants::FRAME_BYTECODE_OFFSET).u32
255end
256
257# Register access:
258
259macro(:frame_vreg_ptr) do |frame, vreg_idx|
260  vreg_offset := AddI(Mul(vreg_idx, Constants::VREGISTER_SIZE).word).Imm(Constants::VREGISTERS_OFFSET).word
261  Add(frame, vreg_offset).ptr
262end
263
264macro(:vreg_ptr) do |operand|
265  vreg_idx := as_vreg_idx(operand)
266  frame_vreg_ptr(%frame, vreg_idx)
267end
268
269macro(:get_value) do |vreg_ptr|
270  LoadI(vreg_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET)
271end
272
273macro(:set_value) do |vreg_ptr, val|
274  StoreI(vreg_ptr, val).Imm(Constants::VREGISTER_VALUE_OFFSET).send(val.is_a?(Integer) || val.is_a?(String) ? :u64 : val.type)
275end
276
277macro(:get_moffset_frame) do |frame|
278  vreg_num := LoadI(frame).Imm(Constants::VREGISTERS_NUM_OFFSET).u32
279  Mul(u32toword(vreg_num), Constants::VREGISTER_SIZE).word
280end
281
282macro(:get_moffset) do
283  Options.arm64? ? %moffset : get_moffset_frame(%frame)
284end
285
286macro(:get_method_ptr_frame) do |frame|
287  LoadI(frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
288end
289
290macro(:get_method_ptr) do
291  Options.arm64? ? %method_ptr : get_method_ptr_frame(%frame)
292end
293
294macro(:get_tag) do |vreg_ptr|
295  vreg_mirror_ptr := Add(vreg_ptr, get_moffset()).ptr
296  LoadI(vreg_mirror_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
297end
298
299macro(:set_tag_frame) do |frame, vreg_ptr, tag, moffset|
300  vreg_mirror_ptr := Add(vreg_ptr, moffset).ptr
301  StoreI(vreg_mirror_ptr, tag).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
302end
303
304macro(:set_tag) do |vreg_ptr, tag|
305  set_tag_frame(%frame, vreg_ptr, tag, get_moffset())
306end
307
308macro(:vreg_value) do |operand|
309  get_value(vreg_ptr(operand))
310end
311
312macro(:set_primitive) do |v, value|
313  set_tag(v, Constants::PRIMITIVE_TAG)
314  set_value(v, value)
315end
316
317macro(:set_object) do |v, value|
318  set_tag(v, Constants::OBJECT_TAG)
319  set_value(v, value)
320end
321
322macro(:copy_reg) do |new_frame, dst_idx, src_operand, new_moffset|
323  dst_reg_ptr = frame_vreg_ptr(new_frame, dst_idx)
324  src_reg_ptr = vreg_ptr(src_operand)
325  set_value(dst_reg_ptr, get_value(src_reg_ptr).i64)
326  set_tag_frame(new_frame, dst_reg_ptr, get_tag(src_reg_ptr), new_moffset)
327end
328
329# Accumulator access:
330
331macro(:acc_ptr_frame) do |frame|
332  AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
333end
334
335macro(:acc_ptr) do
336  acc_ptr_frame(%frame)
337end
338
339macro(:has_object) do |tag|
340  AndI(tag).Imm("coretypes::TaggedValue::OBJECT_MASK").u64
341end
342
343macro(:save_acc_tag) do |tag|
344  StoreI(acc_ptr, tag).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
345end
346
347macro(:save_acc_value) do |value|
348  StoreI(%frame, value).Imm(Constants::GET_ACC_OFFSET).send(value.type)
349end
350
351macro(:save_acc) do
352  save_acc_var(%acc, %acc_tag)
353end
354
355macro(:save_acc_var) do |acc_var, acc_tag_var|
356  save_acc_tag(acc_tag_var)
357  save_acc_value(acc_var)
358end
359
360macro(:restore_acc) do
361  LoadI(%frame).Imm(Constants::GET_ACC_OFFSET)
362end
363
364macro(:restore_acc_tag) do
365  LoadI(acc_ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
366end
367
368macro(:load_to_acc_reg) do |acc_value, acc_tag_value|
369  acc_tag := acc_tag_value
370  acc := acc_value
371end
372
373macro(:set_acc_primitive) do |value|
374  load_to_acc_reg(value, Constants::PRIMITIVE_TAG)
375end
376
377macro(:set_acc_object) do |value|
378  load_to_acc_reg(value, Constants::OBJECT_TAG)
379end
380
381macro(:copy_acc) do |dst_ptr|
382  StoreI(dst_ptr, acc_tag).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
383  StoreI(dst_ptr, acc).Imm(0).send(acc.type)
384end
385
386macro(:copy_acc_to_reg) do |new_frame, dst_ptr, new_moffset = nil|
387  set_tag_frame(new_frame, dst_ptr, acc_tag, new_moffset)
388  set_value(dst_ptr, acc)
389end
390
391["eq", "ne"].each do |cc|
392  macro(:"assert_has_object_#{cc}") do |vreg_ptr|
393    If(has_object(vreg_ptr), 0).send(:"#{cc.upcase}").Unlikely {
394      Intrinsic(:UNREACHABLE).Terminator.void
395    }
396  end
397end
398
399# Helper macros:
400
401macro(:tail_call) do |addr|
402  Intrinsic(:TAIL_CALL, addr).Terminator.void
403  Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
404end
405
406macro(:dispatch) do |table, pc|
407  opc := readbyte(pc, 0)
408  offset := Mul(u8toword(opc), "WordSize()").word
409  addr := Load(table, offset).ptr
410  LiveOut(pc).DstReg(regmap[:pc]).ptr
411  LiveOut(table).DstReg(regmap[:dispatch]).ptr
412  tail_call(addr)
413end
414
415macro(:call_runtime) do |sym, *args|
416  Call(*args).Method(sym)
417end
418
419macro(:advance_pc_imm) do |pc, imm|
420  AddI(pc).Imm(imm).ptr
421end
422
423macro(:advance_pc_var) do |pc, var|
424  Add(pc, var).ptr
425end
426
427macro(:acc_receiver) do |op, imm|
428  If(imm, 0).EQ {
429    res1 := acc.ptr
430  } Else {
431    res2 := vreg_value(op).ptr
432  }
433  Phi(res1, res2).ptr
434end
435
436macro(:get_receiver) do |v, imm|
437  if imm
438    acc_receiver(v, imm)
439  else
440    vreg_value(v).ptr
441  end
442end
443
444macro(:read_uleb) do |ptr|
445  fast_uleb := u8tou32(LoadI(ptr).Imm(0).u8)
446  If(fast_uleb, 0x80).GE.Unlikely {
447    slow_uleb := call_runtime("ReadUlebEntrypoint", ptr).u32
448  }
449  Phi(fast_uleb, slow_uleb).u32
450end
451
452macro(:method_file_data) do |method_ptr|
453  panda_file := LoadI(method_ptr).Imm(Constants::METHOD_PANDA_FILE_OFFSET).ptr
454  code_id := LoadI(method_ptr).Imm(Constants::METHOD_CODE_ID_OFFSET).u32
455  base := LoadI(panda_file).Imm(0).ptr
456  method_data_ptr := Add(base, u32toword(code_id)).ptr
457end
458
459macro(:get_cache_entry_ptr) do
460  cache := AddI(%tr).Imm(Constants::THREAD_INTERPRETER_CACHE_OFFSET).ptr
461  idx := AndI(ShrI(Bitcast(%pc).SrcType("DataType::POINTER").word).Imm(2).word).Imm("InterpreterCache::N - 1").word
462  Add(cache, Mul(idx, "sizeof(InterpreterCache::Entry)").word).ptr
463end
464
465macro(:cache_entry) do |id, need_restore, need_save, type, slow_path_name, enable_slowpath = true|
466  entry_ptr := get_cache_entry_ptr()
467  entry_pc := LoadI(entry_ptr).Imm(0).ptr
468  entry_caller := LoadI(entry_ptr).Imm("sizeof(void*)").ptr
469  method_ptr := get_method_ptr()
470  If(entry_pc, %pc).EQ.Likely {
471    If(entry_caller, method_ptr).EQ.Likely {
472      fast := LoadI(entry_ptr).Imm("2*sizeof(void*)").send(type)
473    } Else {
474      Goto(:Slow)
475    }
476  } Else {
477    Label(:Slow)
478    if slow_path_name
479      if need_save
480        save_acc_var(acc, acc_tag)
481      end
482      slow := call_runtime(slow_path_name, %tr, method_ptr, u16tou32(id), entry_ptr, %pc).send(type)
483      if need_restore
484        acc_restored := restore_acc().send(acc.type)
485      end
486    else
487      slow := nullptr
488    end
489  }
490  Phi(fast, slow).send(type)
491end
492
493macro(:field_offset) do |id|
494  cache_entry(id, false, false, :ptr, "GetFieldByIdEntrypoint")
495end
496
497macro(:static_field) do |id, need_restore = true, need_save = true|
498  cache_entry(id, need_restore, need_save, :ptr, "GetStaticFieldByIdEntrypoint")
499end
500
501macro(:callee_ptr) do |id, need_save|
502  cache_entry(id, true, need_save, :ptr, "GetCalleeMethodFromBytecodeId")
503end
504
505macro(:type_ptr) do |id, need_restore = false, need_save = true|
506  cache_entry(id, need_restore, need_save, :ptr, "ResolveTypeByIdEntrypoint")
507end
508
509macro(:find_catch_block) do
510  handler_pc := call_runtime("FindCatchBlockInIFrames", %tr, %frame, pc).ptr
511  If(handler_pc, pc).EQ.Unlikely {
512    Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
513  }
514  frame_eh := LoadI(%tr).Imm(Constants::THREAD_FRAME_OFFSET).ptr
515  if Options.arm64?
516    moffset_eh := get_moffset_frame(frame_eh)
517    method_ptr_eh := get_method_ptr_frame(frame_eh)
518  end
519  acc_ptr := acc_ptr_frame(frame_eh).ptr
520  acc_eh := LoadI(acc_ptr).Imm(0).u64
521  acc_tag_eh := LoadI(AddI(acc_ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).ptr).u64
522  handler_pc
523end
524
525macro(:move_to_exception) do
526  LiveOut(table).DstReg(regmap[:dispatch]).ptr
527  LiveOut(frame).DstReg(regmap[:frame]).ptr
528  if Options.arm64?
529    LiveOut(moffset).DstReg(regmap[:moffset]).word
530    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
531  end
532  LiveOut(tr).DstReg(regmap[:tr]).ptr
533  LiveOut(pc).DstReg(regmap[:pc]).ptr
534  addr := Load(table, Panda::dispatch_table.handler_names.size * 8).ptr
535  tail_call(addr)
536end
537
538macro(:set_no_hoist_flag) do |inst|
539  inst.SetFlag("compiler::inst_flags::NO_HOIST")
540end
541
542macro(:move_to_exit) do |pc, acc, acc_tag|
543  LiveOut(acc).DstReg(regmap[:acc]).ptr
544  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr
545  LiveOut(frame).DstReg(regmap[:frame]).ptr
546  if Options.arm64?
547    LiveOut(moffset).DstReg(regmap[:moffset]).word
548    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
549  end
550  LiveOut(tr).DstReg(regmap[:tr]).ptr
551
552  dispatch(table, pc)
553end
554
555macro(:exception_val) do
556  LoadI(%tr).Imm(Constants::THREAD_EXCEPTION_OFFSET).ptr
557end
558
559# make a stop at Safepoint if the flag is set
560macro(:safepoint) do |acc_type, is_save_acc|
561  flags := LoadI(%tr).Imm(Constants::THREAD_FLAG_OFFSET).u16
562  If(flags, 0).NE.Unlikely {
563    save_acc_var(acc, acc_tag) if is_save_acc
564    call_runtime("SafepointEntrypointInterp", %tr).void
565    restored_acc := restore_acc().send(acc_type)
566  }
567  acc_ := Phi(acc, restored_acc).send(acc_type)
568end
569
570macro(:verify) do |method, is_initobj|
571  if is_initobj
572    method_flags := LoadI(method).Imm(Constants::METHOD_ACCESS_FLAGS_OFFSET).Volatile(true).u32
573  end
574  intrinsic_bit := ShrI(AndI(method_flags).Imm("ACC_INTRINSIC").u32).Imm(Constants::INTRINSIC_MASK_SHIFT).u32
575  verif_status := ShrI(AndI(method_flags).Imm("VERIFICATION_STATUS_MASK").u32).Imm("VERIFICATION_STATUS_SHIFT").u32
576  If(Or(verif_status, intrinsic_bit).u32, Constants::VERIFIED_OK).LT.Unlikely {
577    If(call_runtime("Verify", method).b, 0).EQ.Unlikely {
578      move_to_exception
579    }
580  }
581end
582
583macro(:update_hotness_counter) do |callee, is_initobj|
584  verify(callee, is_initobj)
585
586  hc := LoadI(callee).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16;
587  If(hc, 0).LE.Unlikely {
588    call_runtime("CallCompilerSlowPath", %tr, callee).void;
589  } Else {
590    StoreI(callee, SubI(hc).Imm(1).i16).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16
591  }
592end
593
594macro(:update_branch_taken) do |method_ptr|
595  prof_data := LoadI(method_ptr).Volatile(true).Imm(Constants::METHOD_NATIVE_POINTER_OFFSET).ptr
596  #TODO(mshimenkov): place likely/unlikely
597  If(prof_data, 0).NE {
598    call_runtime("UpdateBranchTaken", method_ptr, %frame, %pc, prof_data).void
599  }
600end
601
602macro(:update_branch_untaken) do |method_ptr|
603  prof_data := LoadI(method_ptr).Volatile(true).Imm(Constants::METHOD_NATIVE_POINTER_OFFSET).ptr
604  #TODO(mshimenkov): place likely/unlikely
605  If(prof_data, 0).NE {
606    call_runtime("UpdateBranchUntaken", method_ptr, %frame, %pc, prof_data).void
607  }
608end
609
610macro(:instrument_branches) do |imm, acc_type, method_ptr|
611  inc_pc := advance_pc_var(pc, i32tou64(imm))
612  If(imm, 0).LE {
613    safepoint(acc_type, true)
614
615    hc := LoadI(method_ptr).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16;
616
617    If(hc, 0).LE.Unlikely {
618      osr_success := call_runtime("CallCompilerSlowPathOSR", %tr, method_ptr, %frame, acc_, acc_tag, ins_offset, imm).b
619      IfImm(osr_success).Imm(0).NE.Unlikely {
620        handle_fake_return()
621      }
622    } Else {
623      StoreI(method_ptr, SubI(hc).Imm(1).i16).Imm(Constants::GET_HOTNESS_COUNTER_OFFSET).i16
624    }
625
626    tmp_acc := Phi(acc_, fake_acc, acc_).send(acc_type)
627    tmp_acc_tag := Phi(acc_tag, fake_acc_tag, acc_tag).i64
628    tmp_frame := Phi(%frame, fake_frame, %frame).ptr
629    if Options.arm64?
630      tmp_moffset := Phi(%moffset, fake_moffset, %moffset).word
631      tmp_method_ptr := Phi(%method_ptr, fake_method_ptr, %method_ptr).ptr
632    end
633    tmp_pc := Phi(inc_pc, fake_pc, inc_pc).ptr
634  }
635
636  acc_sf := Phi(acc, tmp_acc).send(acc_type)
637  acc_tag_sf := Phi(acc_tag, tmp_acc_tag).i64
638  frame_sf := Phi(%frame, tmp_frame).ptr
639  if Options.arm64?
640    moffset_sf := Phi(%moffset, tmp_moffset).word
641    method_ptr_sf := Phi(%method_ptr, tmp_method_ptr).ptr
642  end
643  Phi(inc_pc, tmp_pc).ptr
644end
645
646macro(:initobj_call) do |id, size, callee, nargs, copy_lambda, op_format, first_vreg|
647  If(callee, 0).EQ.Unlikely {
648    klass_1 := call_runtime("GetMethodClassById", get_method_ptr(), id).ref
649    acc_ := nullptr
650    acc_tag_ := Constants::OBJECT_TAG
651    If(klass_1, 0).EQ.Unlikely {
652      move_to_exception
653    }
654  } Else {
655    klass_2 := LoadI(callee).Imm(Constants::METHOD_CLASS_OFFSET).ref
656  }
657  acc := Phi(acc_, acc).send(acc.type)
658  acc_tag := Phi(acc_tag_, acc_tag).i64
659  klass := Phi(klass_1, klass_2).ref
660  save_acc_var(acc, acc_tag)
661
662  component_type := LoadI(klass).Imm(Constants::CLASS_COMPONENT_OFFSET).ref
663  If(component_type, 0).NE.Unlikely {
664    array := call_runtime("CreateMultiDimensionalArrayById", %tr, %frame, klass, get_method_ptr(), id, pc, op_format).ptr
665    If(array, 0).EQ.Unlikely {
666      LiveOut(array).DstReg(regmap[:acc]).ptr
667      LiveOut(Constants::OBJECT_TAG).DstReg(regmap[:acc_tag]).ptr
668      move_to_exception
669    }
670    move_to_exit(advance_pc_imm(%pc, size), array, Constants::OBJECT_TAG)
671  }
672  If(callee, 0).EQ.Unlikely {
673    entry_ptr := get_cache_entry_ptr()
674    callee_0 := call_runtime("GetCalleeMethodFromBytecodeId", %tr, get_method_ptr(), u16tou32(id), entry_ptr, %pc).ptr
675  }
676  callee_ := Phi(callee, callee_0).ptr
677  If(callee_, 0).EQ.Unlikely {
678    move_to_exception
679  }
680
681  class_flags := LoadI(klass).Imm(Constants::BASE_CLASS_FLAGS_OFFSET).u32
682  If(AndI(class_flags).Imm("ark::Class::STRING_CLASS").u32, 0).NE.Unlikely {
683    ctor_arg := vreg_value(first_vreg).ref
684    str := call_runtime("VmCreateString", %tr, callee_, ctor_arg).ptr
685    If(str, 0).EQ.Unlikely {
686      LiveOut(str).DstReg(regmap[:acc]).ptr
687      LiveOut(Constants::OBJECT_TAG).DstReg(regmap[:acc_tag]).ptr
688      move_to_exception
689    }
690    move_to_exit(advance_pc_imm(%pc, size), str, Constants::OBJECT_TAG)
691  }
692
693  obj := call_runtime("CreateObjectByClassInterpreter", %tr, klass).ptr
694  If(obj, 0).EQ.Unlikely {
695    move_to_exception
696  }
697  # no restore as acc is dead now
698  acc := obj
699  acc_tag := Constants::OBJECT_TAG
700  save_acc_var(obj, Constants::OBJECT_TAG)
701  generic_call(id, size, true, callee_, nargs, copy_lambda)
702end
703
704macro(:align_up) do |val|
705  alignment = Constants::DEFAULT_FRAME_ALIGNMENT_IN_BYTES
706  AndI(AddI(val).Imm("#{alignment} - 1U").word).Imm("~(#{alignment} - 1U)").word
707end
708
709macro(:get_alloc_size) do |size|
710  v := Mul(size, Constants::VREGISTER_SIZE).word
711  align_up(AddI(v).Imm("CORE_EXT_FRAME_DATA_SIZE + #{Constants::FRAME_SIZE}").word).word
712end
713
714macro(:create_frame) do |frame_size, callee|
715  actual_size := Add(frame_size, frame_size).word
716
717  if defines.DEBUG
718    If(callee, 0).EQ.Unlikely {
719      Intrinsic(:UNREACHABLE).Terminator.void
720    }
721  end
722  alloc_sz := get_alloc_size(actual_size)
723  mirror_sz := Mul(frame_size, Constants::VREGISTER_SIZE).word
724  mirror_offset = AddI(mirror_sz).Imm("CORE_EXT_FRAME_DATA_SIZE + #{Constants::FRAME_SIZE}").word
725  mem := call_runtime("AllocFrameInterp", %tr, alloc_sz).ptr
726  If(mem, 0).EQ.Unlikely {
727    move_to_exception
728  }
729  mirror_frame := Add(mem, mirror_offset).ptr
730  frame_end_addr := Add(mem, alloc_sz).ptr
731
732  If(mirror_frame, frame_end_addr).EQ.Unlikely do
733    Goto(:Exit_)
734  end
735  Label(:Head_)
736  mf := Phi(mirror_frame, mirror_frame_).ptr
737  StoreI(mf, 0x0).Imm(0).word
738  mirror_frame_ := AddI(mf).Imm(Constants::VREGISTER_SIZE).ptr
739  If(mf, frame_end_addr).LT.Likely do
740    Goto(:Head_)
741  end
742  Label(:Exit_)
743
744  call_runtime("InitializeFrame", mem, callee, %frame, frame_size).ptr
745end
746
747macro(:generic_call) do |id, size, is_initobj, callee, nargs, copy_lambda|
748  safepoint(acc.type, !is_initobj)
749  acc := acc_
750  update_hotness_counter(callee, is_initobj)
751
752  entrypoint := LoadI(callee).Imm(Constants::METHOD_COMPILED_ENTRY_POINT_OFFSET).ptr
753  If(call_runtime("IsCompiled", entrypoint).i32, 0).NE.Unlikely {
754    save_acc_var(acc, acc_tag) unless is_initobj
755    call_runtime("InterpreterToCompiledCodeBridge", pc, frame, callee, %tr).void
756
757    StoreI(%tr, 0).Imm(Constants::GET_FRAME_KIND_OFFSET).u16
758    StoreI(%tr, %frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
759
760    If(exception_val(), 0).NE.Unlikely {
761      move_to_exception
762    }
763    acc_native := restore_acc().send(acc.type)
764    acc_tag_native := restore_acc_tag().i64
765
766    pc_native := advance_pc_imm(%pc, size)
767  } Else {
768    num_vregs := read_uleb(method_file_data(callee))
769    num_vregs := u32toword(num_vregs)
770    num_vregs := AddI(num_vregs).Imm(1).word if is_initobj
771    if nargs
772      num_args := nargs
773    else
774      num_args := u32toword(LoadI(callee).Imm(Constants::METHOD_NUM_ARGS_OFFSET).u32)
775    end
776
777    frame_size := Add(num_vregs, num_args).word
778    new_frame := create_frame(frame_size, callee)
779
780    new_moffset := Mul(u32toword(frame_size), Constants::VREGISTER_SIZE).word
781    method_ptr := callee
782    # TODO(mbolshov): we could negate IS_STACKLESS and don't do this store every time
783    frame_flags := "Frame::IS_STACKLESS"
784    if is_initobj
785      frame_flags := Or(frame_flags, "Frame::IS_INITOBJ").word
786      obj_vreg_ptr := frame_vreg_ptr(new_frame, SubI(num_vregs).Imm(1).word)
787      set_tag_frame(new_frame, obj_vreg_ptr, Constants::OBJECT_TAG, new_moffset)
788      set_value(obj_vreg_ptr, restore_acc().send(acc.type))
789    end
790    StoreI(new_frame, frame_flags).Imm(Constants::FRAME_FLAGS_OFFSET).word
791    copy_lambda.call(new_frame, num_vregs, num_args, new_moffset)
792    StoreI(new_frame, frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
793    StoreI(frame, advance_pc_imm(pc, size)).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr
794    pc_int := call_runtime("GetInstructionsByMethod", callee).ptr
795    StoreI(new_frame, pc_int).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
796    StoreI(%tr, new_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
797  }
798  load_to_acc_reg(Phi(acc_native, acc).send(acc.type), Phi(acc_tag_native, acc_tag.i64).i64)
799  frame := Phi(%frame, new_frame).ptr
800  if Options.arm64?
801    moffset := Phi(%moffset, new_moffset).word
802    method_ptr := Phi(%method_ptr, method_ptr).ptr
803  end
804  pc := Phi(pc_native, pc_int).ptr
805end
806
807macro(:generic_return) do |copy_lambda|
808  frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word
809  If(And(frame_flags, "Frame::IS_STACKLESS").word, 0).NE.Likely {
810    prev_frame := LoadI(%frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
811    next_pc := LoadI(prev_frame).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr
812    copy_lambda.call(prev_frame, frame_flags)
813    StoreI(%tr, prev_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
814    call_runtime("FreeFrameInterp", frame, %tr).void
815    frame := prev_frame
816    if Options.arm64?
817      moffset := get_moffset_frame(frame)
818      method_ptr := get_method_ptr_frame(frame)
819    end
820    pc := next_pc
821  } Else {
822    save_acc()
823    Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
824  }
825end
826
827# Handlers:
828
829macro(:handle_throw) do |vs|
830  If(vs, 0).EQ.Unlikely {
831    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
832  } Else {
833    call_runtime("ThrowExceptionFromInterpreter", %tr, vs, %frame, %pc).void
834  }
835  pc := find_catch_block()
836  frame := frame_eh
837  if Options.arm64?
838    moffset := moffset_eh
839    method_ptr := method_ptr_eh
840  end
841  load_to_acc_reg(acc_eh, acc_tag_eh)
842end
843
844macro(:handle_movi) do |vd, imm|
845  set_primitive(vd, imm).i32
846end
847
848macro(:handle_movi_64) do |vd, imm|
849  set_primitive(vd, imm).i64
850end
851
852macro(:handle_mov) do |vd, vs|
853  set_primitive(vd, vs).u32
854end
855
856macro(:handle_lda) do |vs|
857  set_acc_primitive(vs)
858end
859
860macro(:handle_lda_str_id32) do |id|
861  string := call_runtime("ResolveStringByIdEntrypoint", %tr, %frame, id).ptr
862  If(string, 0).EQ.Unlikely {
863    move_to_exception
864  }
865  set_acc_object(string)
866end
867
868macro(:handle_lda_type_id16) do |id|
869  type := type_ptr(id)
870  If(type, 0).EQ.Unlikely {
871    move_to_exception
872  }
873  type_obj := LoadI(type).Imm("ark::Class::GetManagedObjectOffset()").ptr
874  set_acc_object(type_obj)
875end
876
877macro(:handle_lda_const_v8_id16) do |v, id|
878  save_acc()
879  cnst := call_runtime("ResolveLiteralArrayByIdEntrypoint", %tr, get_method_ptr(), u16tou32(id)).ref
880  acc := restore_acc().send(acc.type)
881  If(cnst, 0).EQ.Unlikely {
882    move_to_exception
883  }
884  set_object(v, cnst).ref
885end
886
887macro(:handle_ldai_imm) do |imm|
888  set_acc_primitive(imm)
889end
890
891macro(:handle_ldai_64_imm) do |imm|
892  set_acc_primitive(imm)
893end
894
895macro(:handle_fldai_imm) do |imm|
896  set_acc_primitive(imm)
897end
898
899macro(:handle_fldai_64_imm) do |imm|
900  set_acc_primitive(imm)
901end
902
903macro(:handle_sta_v8) do |vd|
904  set_primitive(vd, acc.u32).u32
905end
906
907macro(:handle_sta_64_v8) do |vd|
908  set_primitive(vd, acc.u64).u64
909end
910
911macro(:handle_jmp_imm) do |pc, imm|
912  next_pc := instrument_branches(imm, acc.type, get_method_ptr())
913  load_to_acc_reg(acc_sf, acc_tag_sf)
914  frame := frame_sf
915  if Options.arm64?
916    moffset := moffset_sf
917    method_ptr := method_ptr_sf
918  end
919  next_pc
920end
921
922macro(:handle_inci_v4_imm4) do |v, imm|
923  val := get_value(v).i32
924  add := Add(val, imm).i32
925  set_value(v, add).i32
926end
927
928[['LT', ''], ['B', 'u']].each do |cc, sign|
929  macro(:"handle_#{sign}cmp") do |acc_val, vs|
930    # TODO: use Cmp IR instruction?
931    If(acc_val, vs).send(:"#{cc.upcase}") {
932      res1 := -1
933    } Else {
934      If(acc_val, vs).EQ {
935        res2 := 0
936      } Else {
937        res3 := 1
938      }
939    }
940    acc := Phi(res1, res2, res3).i32
941  end
942end
943
944['Add', 'Sub', 'And', 'Mul', 'Or', 'Xor', 'Shl', 'Shr', 'AShr'].each do |op|
945  # v4_v4
946  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
947    set_acc_primitive(send(op, vs1, vs2).i32).i32
948  end
949  # v4_v4 without acc
950  macro(:"handle_#{op.downcase}_v_v4_v4") do |v1, v2|
951    set_primitive(v1, send(op, get_value(v1).i32, v2).i32).i32
952  end
953  # v8
954  macro(:"handle_#{op.downcase}2_v8") do |vs|
955    acc := send(op, acc.i32, vs).i32
956  end
957  # 64_v8
958  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
959    acc := send(op, acc.i64, vs).i64
960  end
961  # v8_v8
962  macro(:"handle_#{op.downcase}2_v8_v8") do |vd, vs|
963    set_primitive(vd, send(op, acc.i32, vs).i32).i32
964  end
965  # 64_v8_v8
966  macro(:"handle_#{op.downcase}2_64_v8_v8") do |vd, vs|
967    set_primitive(vd, send(op, acc.i64, vs).i64).i64
968  end
969  # imm
970  macro(:"handle_#{op.downcase}i_imm") do |imm|
971    acc := send(op, acc.i32, imm).i32
972  end
973  # v4_v4_imm
974  macro(:"handle_#{op.downcase}i_v4_v4_imm") do |vd, vs, imm|
975    set_primitive(vd, send(op, vs, imm).i32)
976  end
977end
978
979['Add', 'Sub', 'Mul', 'Div'].each do |op|
980  macro(:"handle_f#{op.downcase}2_v8") do |vs|
981    acc := send(op, acc.f32, vs).f32
982  end
983  macro(:"handle_f#{op.downcase}2_64_v8") do |vs|
984    acc := send(op, acc.f64, vs).f64
985  end
986  macro(:"handle_f#{op.downcase}2_v8_v8") do |vd, vs|
987    set_primitive(vd, send(op, acc.f32, vs).f32).f32
988  end
989  macro(:"handle_f#{op.downcase}2_64_v8_v8") do |vd, vs|
990    set_primitive(vd, send(op, acc.f64, vs).f64).f64
991  end
992end
993
994macro(:handle_fmod2_v8) do |vs|
995  acc := call_runtime("fmodf", acc.f32, vs).f32
996end
997
998macro(:handle_fmod2_64_v8) do |vs|
999  acc := call_runtime("fmod", acc.f64, vs).f64
1000end
1001
1002macro(:handle_fmod2_v8_v8) do |vd, vs|
1003  set_primitive(vd, call_runtime("fmodf", acc.f32, vs).f32).f32
1004end
1005
1006macro(:handle_fmod2_64_v8_v8) do |vd, vs|
1007  set_primitive(vd, call_runtime("fmod", acc.f64, vs).f64).f64
1008end
1009
1010['Div', 'Mod'].each do |op|
1011  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
1012    If(vs2, 0).EQ.Unlikely {
1013      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1014      move_to_exception
1015    }
1016    set_acc_primitive(send(op, vs1, vs2).i32)
1017  end
1018  macro(:"handle_#{op.downcase}_v_v4_v4") do |v1, v2|
1019    If(v2, 0).EQ.Unlikely {
1020      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1021      move_to_exception
1022    }
1023    set_primitive(v1, send(op, get_value(v1).i32, v2).i32)
1024  end
1025  macro(:"handle_#{op.downcase}2_v8") do |vs|
1026    If(vs, 0).EQ.Unlikely {
1027      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1028      move_to_exception
1029    }
1030    acc := send(op, acc.i32, vs).i32
1031  end
1032  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
1033    If(vs, 0).EQ.Unlikely {
1034      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1035      move_to_exception
1036    }
1037    acc := send(op, acc.i64, vs).i64
1038  end
1039  macro(:"handle_#{op.downcase}2_v8_v8") do |vd, vs|
1040    If(vs, 0).EQ.Unlikely {
1041      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1042      move_to_exception
1043    }
1044    set_primitive(vd, send(op, acc.i32, vs).i32).i32
1045  end
1046  macro(:"handle_#{op.downcase}2_64_v8_v8") do |vd, vs|
1047    If(vs, 0).EQ.Unlikely {
1048      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1049      move_to_exception
1050    }
1051    set_primitive(vd, send(op, acc.i64, vs).i64).i64
1052  end
1053  macro(:"handle_#{op.downcase}u2_v8") do |vs|
1054    If(vs, 0).EQ.Unlikely {
1055      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1056      move_to_exception
1057    }
1058    acc := send(op, acc.u32, vs).u32
1059  end
1060  macro(:"handle_#{op.downcase}u2_64_v8") do |vs|
1061    If(vs, 0).EQ.Unlikely {
1062      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1063      move_to_exception
1064    }
1065    acc := send(op, acc.u64, vs).u64
1066  end
1067  macro(:"handle_#{op.downcase}u2_v8_v8") do |vd, vs|
1068    If(vs, 0).EQ.Unlikely {
1069      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1070      move_to_exception
1071    }
1072    set_primitive(vd, send(op, acc.u32, vs).u32).u32
1073  end
1074  macro(:"handle_#{op.downcase}u2_64_v8_v8") do |vd, vs|
1075    If(vs, 0).EQ.Unlikely {
1076      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1077      move_to_exception
1078    }
1079    set_primitive(vd, send(op, acc.u64, vs).u64).u64
1080  end
1081  macro(:"handle_#{op.downcase}i_imm") do |imm|
1082    If(imm, 0).EQ.Unlikely {
1083      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1084      move_to_exception
1085    }
1086    acc := send(op, acc.i32, imm).i32
1087  end
1088  macro(:"handle_#{op.downcase}i_v4_v4_imm") do |vd, vs, imm|
1089    If(imm, 0).EQ.Unlikely {
1090      call_runtime("ThrowArithmeticExceptionFromInterpreter").void
1091      move_to_exception
1092    }
1093    set_primitive(vd, send(op, vs, imm).i32)
1094  end
1095end
1096
1097# Unary
1098['Not', 'Neg'].each do |op|
1099  macro(:"handle_#{op.downcase}") do
1100    acc := send(op, acc.i32).i32
1101  end
1102  macro(:"handle_#{op.downcase}_64") do
1103    acc := send(op, acc.i64).i64
1104  end
1105end
1106
1107[['', :f32], ['_64', :f64]].each do |name, type|
1108  macro(:"handle_fneg#{name}") do
1109    acc := Neg(acc.send(type)).send(type)
1110  end
1111end
1112
1113macro(:handle_newarr_v4_v4_id16) do |vd, vs, id|
1114  If(vs, 0).LT.Unlikely {
1115    call_runtime("ThrowNegativeArraySizeExceptionFromInterpreter", vs).void
1116    move_to_exception
1117  }
1118  save_acc()
1119  array := call_runtime("CreateArrayByIdEntrypoint", %tr, get_method_ptr(), u16tou32(id), vs).ref
1120  acc := restore_acc().ptr
1121  If(array, 0).EQ.Unlikely {
1122    move_to_exception
1123  }
1124  set_object(vd, array).ref
1125end
1126
1127macro(:handle_lenarr_v8) do |vs|
1128  If(vs, 0).EQ.Unlikely {
1129    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1130    move_to_exception
1131  }
1132  len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1133  set_acc_primitive(len_array)
1134end
1135
1136[['ldarr', :i32, 2], ['ldarr_64', :i64, 3], ['fldarr_64', :f64, 3], ['fldarr_32', :f32, 2]].each do |name, type, elem_size_shift|
1137  macro(:"handle_#{name}_v8") do |vs|
1138    If(vs, 0).EQ.Unlikely {
1139      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1140      move_to_exception
1141    }
1142    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1143    If(acc.i32, len_array).AE.Unlikely {
1144      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
1145      move_to_exception
1146    }
1147    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
1148    acc := Load(vs, elem_offset).send(type)
1149  end
1150end
1151
1152[[8, 0], [16, 1]].each do |size, elem_size_shift|
1153  macro(:"handle_ldarr_#{size}_v8") do |vs|
1154    If(vs, 0).EQ.Unlikely {
1155      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1156      move_to_exception
1157    }
1158    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1159    If(acc.i32, len_array).AE.Unlikely {
1160      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
1161      move_to_exception
1162    }
1163    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
1164    load_array := Load(vs, elem_offset).send(:"i#{size}")
1165    acc := send(:"i#{size}toi32", load_array)
1166  end
1167end
1168
1169[[8, 0], [16, 1]].each do |size, elem_size_shift|
1170  macro(:"handle_ldarru_#{size}_v8") do |vs|
1171    If(vs, 0).EQ.Unlikely {
1172      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1173      move_to_exception
1174    }
1175    len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1176    If(acc.i32, len_array).AE.Unlikely {
1177      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
1178      move_to_exception
1179    }
1180    elem_offset = AddI(ShlI(acc.i32).Imm(elem_size_shift).i32).Imm(Constants::ARRAY_DATA_OFFSET).i32
1181    load_array := Load(vs, elem_offset).send(:"u#{size}")
1182    acc := send(:"u#{size}tou32", load_array)
1183  end
1184end
1185
1186macro(:handle_ldarr_obj_v8) do |vs|
1187  If(vs, 0).EQ.Unlikely {
1188    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1189    move_to_exception
1190  }
1191  len_array := LoadI(vs).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1192  If(acc.i32, len_array).AE.Unlikely {
1193    call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", acc.i32, len_array).void
1194    move_to_exception
1195  }
1196  elem_offset = AddI(ShlI(acc.i32).Imm(Constants::REFERENCE_TYPE_SHIFT).i32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1197  load_array := Load(vs, elem_offset).ref
1198  set_acc_object(load_array)
1199end
1200
1201[[8, 0], [16, 1]].each do |size, elem_size_shift|
1202  macro(:"handle_starr_#{size}_v4_v4") do |vs1, vs2|
1203    If(vs1, 0).EQ.Unlikely {
1204      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1205      move_to_exception
1206    }
1207    len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1208    If(vs2, len_array).AE.Unlikely {
1209      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1210      move_to_exception
1211    }
1212    elem_offset = AddI(ShlI(vs2).Imm(elem_size_shift).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1213    Store(vs1, elem_offset, acc.i32).send(:"i#{size}")
1214  end
1215end
1216
1217[['starr', :i32, 2], ['starr_64', :i64, 3], ['fstarr_32', :f32, 2], ['fstarr_64', :f64, 3]].each do |name, type, elem_size_shift|
1218  macro(:"handle_#{name}_v4_v4") do |vs1, vs2|
1219    If(vs1, 0).EQ.Unlikely {
1220      call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1221      move_to_exception
1222    }
1223    len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1224    If(vs2, len_array).AE.Unlikely {
1225      call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1226      move_to_exception
1227    }
1228    elem_offset = AddI(ShlI(vs2).Imm(elem_size_shift).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1229    Store(vs1, elem_offset, acc.send(type)).send(type)
1230  end
1231end
1232
1233macro(:handle_starr_obj_v4_v4) do |vs1, vs2|
1234  If(vs1, 0).EQ.Unlikely {
1235    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1236    move_to_exception
1237  }
1238  len_array := LoadI(vs1).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
1239  If(vs2, len_array).AE.Unlikely {
1240    call_runtime("ThrowArrayIndexOutOfBoundsExceptionFromInterpreter", vs2, len_array).void
1241    move_to_exception
1242  }
1243  res := call_runtime("CheckStoreArrayReferenceFromInterpreter", vs1, acc.ref).u8
1244  If(res, 0).NE.Unlikely {
1245    move_to_exception
1246  }
1247  elem_offset = AddI(ShlI(vs2).Imm(Constants::REFERENCE_TYPE_SHIFT).u32).Imm(Constants::ARRAY_DATA_OFFSET).u32
1248  Store(vs1, elem_offset, acc.ref).SetNeedBarrier(true).ref
1249end
1250
1251macro(:handle_newobj_v8_id16) do |vd, id|
1252  save_acc()
1253  type := type_ptr(id, false, false)
1254  If(type, 0).EQ.Unlikely {
1255    move_to_exception
1256  }
1257  object := call_runtime("CreateObjectByClassInterpreter", %tr, type).ref
1258  acc := restore_acc().ptr
1259  If(object, 0).EQ.Unlikely {
1260    move_to_exception
1261  }
1262  set_object(vd, object).ref
1263end
1264
1265macro(:assert_non_volatile) do |field|
1266  if defines.DEBUG
1267    field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1268    is_volatile := AndI(field_access_flags).Imm("ACC_VOLATILE").u32
1269    If(is_volatile, 0).NE.Unlikely {
1270        Intrinsic(:UNREACHABLE).Terminator.void
1271    }
1272  end
1273end
1274
1275macro(:handle_stobj_v8_id16) do |vs, id|
1276  If(vs, 0).EQ.Unlikely {
1277    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1278    move_to_exception
1279  }
1280  field := field_offset(id)
1281  If(field, 0).EQ.Unlikely {
1282    move_to_exception
1283  }
1284
1285  assert_non_volatile(field)
1286
1287  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1288  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1289  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1290
1291  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1292    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1293      If(field_type_id, typeid).EQ {
1294        acc_type = field_type[0] + "32"
1295        Store(vs, offset, acc.send(:"#{acc_type}")).send(:"#{field_type}")
1296      }
1297    end
1298  } Else {
1299    Store(vs, offset, acc.u32).u32
1300  }
1301end
1302
1303macro(:handle_stobj_64_v8_id16) do |vs, id|
1304  If(vs, 0).EQ.Unlikely {
1305    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1306    move_to_exception
1307  }
1308  field := field_offset(id)
1309  If(field, 0).EQ.Unlikely {
1310    move_to_exception
1311  }
1312
1313  assert_non_volatile(field)
1314
1315  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1316  Store(vs, offset, acc.u64).u64
1317end
1318
1319macro(:handle_stobj_obj_v8_id16) do |vs, id|
1320  If(vs, 0).EQ.Unlikely {
1321    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1322    move_to_exception
1323  }
1324  field := field_offset(id)
1325  If(field, 0).EQ.Unlikely {
1326    move_to_exception
1327  }
1328
1329  assert_non_volatile(field)
1330
1331  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1332  Store(vs, offset, acc.ref).SetNeedBarrier(true).ref
1333end
1334
1335macro(:handle_stobj_v_v4_v4_id16) do |v1, v2, id|
1336  If(v2, 0).EQ.Unlikely {
1337    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1338    move_to_exception
1339  }
1340  field := field_offset(id)
1341  If(field, 0).EQ.Unlikely {
1342    move_to_exception
1343  }
1344
1345  assert_non_volatile(field)
1346
1347  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1348  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1349  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1350
1351  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1352    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1353      If(field_type_id, typeid).EQ {
1354        reg_type = field_type[0] + "32"
1355        Store(v2, offset, v1.send(:"#{reg_type}")).send(:"#{field_type}")
1356      }
1357    end
1358  } Else {
1359    Store(v2, offset, v1.u32).u32
1360  }
1361end
1362
1363macro(:handle_stobj_v_64_v4_v4_id16) do |v1, v2, id|
1364  If(v2, 0).EQ.Unlikely {
1365    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1366    move_to_exception
1367  }
1368  field := field_offset(id)
1369  If(field, 0).EQ.Unlikely {
1370    move_to_exception
1371  }
1372
1373  assert_non_volatile(field)
1374
1375  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1376  Store(v2, offset, v1.u64).u64
1377end
1378
1379macro(:handle_stobj_v_obj_v4_v4_id16) do |v1, v2, id|
1380  If(v2, 0).EQ.Unlikely {
1381    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1382    move_to_exception
1383  }
1384  field := field_offset(id)
1385  If(field, 0).EQ.Unlikely {
1386    move_to_exception
1387  }
1388
1389  assert_non_volatile(field)
1390
1391  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1392  Store(v2.ref, offset, v1.ref).SetNeedBarrier(true).ref
1393end
1394
1395macro(:handle_ldobj_v8_id16) do |vs, id|
1396  If(vs, 0).EQ.Unlikely {
1397    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1398    move_to_exception
1399  }
1400  field := field_offset(id)
1401  # no restore as acc is going to be redefined
1402  If(field, 0).EQ.Unlikely {
1403    move_to_exception
1404  }
1405
1406  assert_non_volatile(field)
1407
1408  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1409  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1410  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1411
1412  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1413    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1414      If(field_type_id, typeid).EQ {
1415        store_type = field_type[0] + "32"
1416        value := Load(vs, offset).send(:"#{field_type}")
1417        acc_value := send(:"#{field_type}to#{store_type}", value)
1418      }
1419      acc := Phi(acc.u64, acc_value.u64).u64
1420    end
1421    acc_casted_slow := acc
1422  } Else {
1423    acc_casted_fast := u32tou64(Load(vs, offset).u32)
1424  }
1425
1426  acc := Phi(acc_casted_slow.u64, acc_casted_fast.u64).u64
1427  acc_tag := Constants::PRIMITIVE_TAG
1428end
1429
1430macro(:handle_ldobj_64_v8_id16) do |vs, id|
1431  If(vs, 0).EQ.Unlikely {
1432    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1433    move_to_exception
1434  }
1435  field := field_offset(id)
1436  # no restore as acc is going to be redefined
1437  If(field, 0).EQ.Unlikely {
1438    move_to_exception
1439  }
1440
1441  assert_non_volatile(field)
1442
1443  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1444  acc := Load(vs, offset).u64
1445  acc_tag := Constants::PRIMITIVE_TAG
1446end
1447
1448macro(:handle_ldobj_obj_v8_id16) do |vs, id|
1449  If(vs, 0).EQ.Unlikely {
1450    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1451    move_to_exception
1452  }
1453  field := field_offset(id)
1454  # no restore as acc is going to be redefined
1455  If(field, 0).EQ.Unlikely {
1456    move_to_exception
1457  }
1458
1459  assert_non_volatile(field)
1460
1461  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1462  value := Load(vs, offset).ref
1463  set_acc_object(value).ref
1464end
1465
1466macro(:handle_ldobj_v_v4_v4_id16) do |vd, vs, id|
1467  If(vs, 0).EQ.Unlikely {
1468    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1469    move_to_exception
1470  }
1471  field := field_offset(id)
1472  If(field, 0).EQ.Unlikely {
1473    move_to_exception
1474  }
1475
1476  assert_non_volatile(field)
1477
1478  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1479  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1480  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1481
1482  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1483    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1484      If(field_type_id, typeid).EQ {
1485        store_type = field_type[0] + "32"
1486        value := Load(vs, offset).send(:"#{field_type}")
1487        set_primitive(vd, send(:"#{field_type}to#{store_type}", value)).send(:"#{store_type}")
1488      }
1489    end
1490  } Else {
1491    set_primitive(vd, Load(vs, offset).u32).u32
1492  }
1493end
1494
1495macro(:handle_ldobj_v_64_v4_v4_id16) do |vd, vs, id|
1496  If(vs, 0).EQ.Unlikely {
1497    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1498    move_to_exception
1499  }
1500  field := field_offset(id)
1501  If(field, 0).EQ.Unlikely {
1502    move_to_exception
1503  }
1504
1505  assert_non_volatile(field)
1506
1507  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1508  value := Load(vs, offset).u64
1509  set_primitive(vd, value).u64
1510end
1511
1512macro(:handle_ldobj_v_obj_v4_v4_id16) do |vd, vs, id|
1513  If(vs, 0).EQ.Unlikely {
1514    call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1515    move_to_exception
1516  }
1517  field := field_offset(id)
1518  If(field, 0).EQ.Unlikely {
1519    move_to_exception
1520  }
1521
1522  assert_non_volatile(field)
1523
1524  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1525  value := Load(vs, offset).ref
1526  set_object(vd, value).ref
1527end
1528
1529macro(:handle_ststatic_id16) do |id|
1530  update_bytecode_offset
1531
1532  field := static_field(id, false)
1533  # no restore because acc holds primitive value
1534
1535  If(field, 0).EQ.Unlikely {
1536    move_to_exception
1537  }
1538
1539  assert_non_volatile(field)
1540
1541  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1542  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1543  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1544  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1545
1546  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1547    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1548      If(field_type_id, typeid).EQ {
1549        acc_type = field_type[0] + "32"
1550        Store(field_class, offset, acc.send(:"#{acc_type}")).send(:"#{field_type}")
1551      }
1552    end
1553  } Else {
1554    Store(field_class, offset, acc.u32).u32
1555  }
1556end
1557
1558macro(:handle_ststatic_64_id16) do |id|
1559  update_bytecode_offset
1560
1561  field := static_field(id, false)
1562  # no restore because acc holds primitive value
1563
1564  If(field, 0).EQ.Unlikely {
1565    move_to_exception
1566  }
1567
1568  assert_non_volatile(field)
1569
1570  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1571  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1572  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1573  Store(field_class, offset, acc.u64).u64
1574end
1575
1576macro(:handle_ststatic_obj_id16) do |id|
1577  update_bytecode_offset
1578  field := static_field(id)
1579  acc := Phi(acc, acc_restored).ref
1580  If(field, 0).EQ.Unlikely {
1581    move_to_exception
1582  }
1583
1584  assert_non_volatile(field)
1585
1586  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1587  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref_uint
1588  class_managed_object := LoadI(bitcast_to_ref(field_class)).Imm(Constants::BASE_CLASS_MANAGED_OBJECT_OFFSET).ref_uint
1589  offset_managed_object := Add(offset, Sub(field_class, class_managed_object).ref_uint).u32
1590
1591  Store(bitcast_to_ref(class_managed_object).ref, offset_managed_object, acc.ref).SetNeedBarrier(true).ref
1592end
1593
1594macro(:handle_ldstatic_id16) do |id|
1595  update_bytecode_offset
1596  save_acc()
1597  field := static_field(id, false)
1598  # no restore as acc is going to be redefined
1599  If(field, 0).EQ.Unlikely {
1600    move_to_exception
1601  }
1602
1603  assert_non_volatile(field)
1604
1605  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1606  field_type_id := ShrI(AndI(field_access_flags).Imm("ACC_TYPE").u32).Imm("ACC_TYPE_SHIFT").u32
1607  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1608  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1609
1610  If(field_type_id, 0x7).LT.Unlikely { # < 32 bits
1611    [[0x2, "u8"], [0x3, "i8"], [0x4, "u8"], [0x5, "i16"], [0x6, "u16"]].each do |typeid, field_type|
1612      If(field_type_id, typeid).EQ {
1613        store_type = field_type[0] + "32"
1614        value := Load(field_class, offset).send(:"#{field_type}")
1615        acc_value := send(:"#{field_type}to#{store_type}", value)
1616      }
1617      acc := Phi(acc.u64, acc_value.u64).u64
1618    end
1619    acc_casted_slow := acc
1620  } Else {
1621    acc_casted_fast := u32tou64(Load(field_class, offset).u32)
1622  }
1623
1624  acc := Phi(acc_casted_slow.u64, acc_casted_fast.u64).u64
1625  acc_tag := Constants::PRIMITIVE_TAG
1626end
1627
1628macro(:handle_ldstatic_64_id16) do |id|
1629  update_bytecode_offset
1630  save_acc()
1631  field := static_field(id, false)
1632  # no restore as acc is going to be redefined
1633  If(field, 0).EQ.Unlikely {
1634    move_to_exception
1635  }
1636
1637  assert_non_volatile(field)
1638
1639  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1640  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref
1641  field_access_flags := LoadI(field).Imm(Constants::FIELD_ACCESS_FLAGS_OFFSET).u32
1642  acc := Load(field_class, offset).u64
1643  acc_tag := Constants::PRIMITIVE_TAG
1644end
1645
1646macro(:handle_ldstatic_obj_id16) do |id|
1647  update_bytecode_offset
1648  save_acc()
1649  field := static_field(id, false)
1650  # no restore as acc is going to be redefined
1651  If(field, 0).EQ.Unlikely {
1652    move_to_exception
1653  }
1654
1655  assert_non_volatile(field)
1656
1657  offset := LoadI(field).Imm(Constants::FIELD_OFFSET_OFFSET).u32
1658  field_class := LoadI(field).Imm(Constants::FIELD_CLASS_OFFSET).ref_uint
1659  class_managed_object := LoadI(bitcast_to_ref(field_class)).Imm(Constants::BASE_CLASS_MANAGED_OBJECT_OFFSET).ref_uint
1660  offset_managed_object := Add(offset, Sub(field_class, class_managed_object).ref_uint).u32
1661
1662  value := Load(bitcast_to_ref(class_managed_object), offset_managed_object).ref
1663  set_acc_object(value).ref
1664end
1665
1666macro(:handle_isinstance_id16) do |id|
1667  type := type_ptr(id, true)
1668  acc := Phi(acc, acc_restored).ref
1669  If(type, 0).EQ.Unlikely {
1670    move_to_exception
1671  }
1672  set_acc_primitive(call_runtime("IsInstanceByIdEntrypoint", acc.ref, type).u32)
1673end
1674
1675macro(:handle_checkcast_id16) do |id|
1676  type := type_ptr(id, true)
1677  acc := Phi(acc, acc_restored).ref
1678  If(type, 0).EQ.Unlikely {
1679    move_to_exception
1680  }
1681  If(call_runtime("CheckCastByIdEntrypoint", acc.ref, type).u32, 0).NE.Unlikely {
1682    move_to_exception
1683  }
1684end
1685
1686macro(:handle_sta_obj_v8) do |vd|
1687  set_object(vd, acc.ref).ref
1688end
1689
1690macro(:handle_lda_obj_v8) do |vs|
1691  set_acc_object(vs)
1692end
1693
1694macro(:handle_mov_null_v8) do |vd|
1695  set_object(vd, 0).ref
1696end
1697
1698macro(:handle_lda_null) do
1699  set_acc_object(0)
1700end
1701
1702['eq', 'ne', 'lt', 'gt', 'le', 'ge'].each do |cc|
1703  ['8', '16'].each do |from|
1704    macro(:"handle_j#{cc}_v8_imm#{from}") do |pc, vs, imm, size|
1705      method_ptr := get_method_ptr()
1706      If(acc.i32, vs).send(:"#{cc.upcase}") {
1707        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
1708        update_branch_taken(method_ptr)
1709        pc1 := instrument_branches(imm_casted, :"i32", method_ptr)
1710      } Else {
1711        update_branch_untaken(method_ptr)
1712        pc2 := advance_pc_imm(pc, size)
1713      }
1714      load_to_acc_reg(Phi(acc_sf, acc).i32, Phi(acc_tag_sf, acc_tag).i64)
1715      frame := Phi(frame_sf, %frame).ptr
1716      if Options.arm64?
1717        moffset := Phi(moffset_sf, %moffset).word
1718        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
1719      end
1720      Phi(pc1, pc2).ptr
1721    end
1722  end
1723end
1724
1725['ne', 'eq', 'lt', 'gt', 'le', 'ge'].each do |cc|
1726  ['8', '16'].each do |from|
1727    macro(:"handle_j#{cc}z_imm#{from}") do |pc, imm, size|
1728      method_ptr := get_method_ptr()
1729      If(acc.i32, 0).send(:"#{cc.upcase}") {
1730        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
1731        update_branch_taken(method_ptr)
1732        pc1 := instrument_branches(imm_casted, :"i32", method_ptr)
1733      } Else {
1734        update_branch_untaken(method_ptr)
1735        pc2 := advance_pc_imm(pc, size)
1736      }
1737      load_to_acc_reg(Phi(acc_sf, acc).i32, Phi(acc_tag_sf, acc_tag).i64)
1738      frame := Phi(frame_sf, %frame).ptr
1739      if Options.arm64?
1740        moffset := Phi(moffset_sf, %moffset).word
1741        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
1742      end
1743      Phi(pc1, pc2).ptr
1744    end
1745  end
1746end
1747
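# Floating-point compare handlers: Fcmpg(true) requests "cmpg" ordering, where an unordered
# (NaN) comparison produces a positive result, while the plain Cmp used for fcmpl treats an
# unordered comparison as negative.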
1748macro(:"handle_fcmpg_v8") do |vs|
1749  acc := Cmp(acc.f32, vs).SrcType("DataType::FLOAT32").Fcmpg(true).i32
1750end
1751
1752macro(:"handle_fcmpg_64_v8") do |vs|
1753  acc := Cmp(acc.f64, vs).SrcType("DataType::FLOAT64").Fcmpg(true).i32
1754end
1755
1756macro(:"handle_fcmpl_v8") do |vs|
1757  acc := Cmp(acc.f32, vs).i32
1758end
1759
1760macro(:"handle_fcmpl_64_v8") do |vs|
1761  acc := Cmp(acc.f64, vs).i32
1762end
1763
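# Reference variants of the conditional jumps (jeq.obj / jne.obj and their "z" forms below)
# follow the same pattern, comparing references instead of i32 values.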
1764['ne', 'eq'].each do |cc|
1765  ['8', '16'].each do |from|
1766    macro(:"handle_j#{cc}_obj_v8_imm#{from}") do |pc, vs, imm, size|
1767      method_ptr := get_method_ptr()
1768      If(vs, acc.ref).send(:"#{cc.upcase}") {
1769        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
1770        update_branch_taken(method_ptr)
1771        pc1 := instrument_branches(imm_casted, :"ref", method_ptr)
1772      } Else {
1773        update_branch_untaken(method_ptr)
1774        pc2 := advance_pc_imm(pc, size)
1775      }
1776      load_to_acc_reg(Phi(acc_sf, acc).ref, Phi(acc_tag_sf, acc_tag).i64)
1777      frame := Phi(frame_sf, %frame).ptr
1778      if Options.arm64?
1779        moffset := Phi(moffset_sf, %moffset).word
1780        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
1781      end
1782      Phi(pc1, pc2).ptr
1783    end
1784  end
1785end
1786
1787['ne', 'eq'].each do |cc|
1788  ['8', '16'].each do |from|
1789    macro(:"handle_j#{cc}z_obj_imm#{from}") do |pc, imm, size|
1790      method_ptr := get_method_ptr()
1791      If(acc.ref, 0).send(:"#{cc.upcase}") {
1792        imm_casted = Cast(imm).SrcType("DataType::INT#{from}").i32
1793        update_branch_taken(method_ptr)
1794        pc1 := instrument_branches(imm_casted, :"ref", method_ptr)
1795      } Else {
1796        update_branch_untaken(method_ptr)
1797        pc2 := advance_pc_imm(pc, size)
1798      }
1799      load_to_acc_reg(Phi(acc_sf, acc).ref, Phi(acc_tag_sf, acc_tag).i64)
1800      frame := Phi(frame_sf, %frame).ptr
1801      if Options.arm64?
1802        moffset := Phi(moffset_sf, %moffset).word
1803        method_ptr := Phi(method_ptr_sf, %method_ptr).ptr
1804      end
1805      Phi(pc1, pc2).ptr
1806    end
1807  end
1808end
1809
1810# Conversions from integer types to u1
1811
1812['i32', 'i64', 'u32', 'u64'].each do |from|
1813  macro(:"handle_#{from}tou1") do
1814    acc := send(:"#{from}tou1", acc.send(from))
1815  end
1816end
1817
1818# Integer truncations and extensions
1819
1820['i32', 'u32'].each do |from|
1821  macro(:"handle_#{from}toi64") do
1822    acc := send(:"#{from}toi64", acc.send(from))
1823  end
1824end
1825
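# Narrowing casts truncate to i8/u8/i16/u16 and then re-extend the result, so that the
# accumulator always ends up holding a 32-bit value.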
1826['i32', 'u32'].each do |from|
1827  ['i16', 'u16', 'i8', 'u8'].each do |to|
1828    macro(:"handle_#{from}to#{to}") do
1829      value := send(:"#{from}to#{to}", acc.send(from))
1830      to_expanded = to.gsub(/\d+/,"32")
1831      acc := send(:"#{to}to#{to_expanded}", value)
1832    end
1833  end
1834end
1835
1836macro(:handle_i64toi32) do
1837  acc := i64toi32(acc.i64)
1838end
1839
1840['i32', 'u32'].each do |to|
1841  macro(:"handle_u64to#{to}") do
1842    acc := send(:"u64to#{to}", acc.u64)
1843  end
1844end
1845
1846# Conversions between integer and floating point types
1847
1848['i32', 'u32', 'i64', 'u64'].each do |from|
1849  ['f32', 'f64'].each do |to|
1850    macro(:"handle_#{from}to#{to}") do
1851      acc := send(:"#{from}to#{to}", acc.send(from))
1852    end
1853  end
1854end
1855
1856['f64', 'i32', 'i64', 'u32', 'u64'].each do |to|
1857  macro(:"handle_f32to#{to}") do
1858    acc := send(:"f32to#{to}", acc.f32)
1859  end
1860end
1861
1862['i32', 'i64', 'u32', 'u64', 'f32'].each do |to|
1863  macro(:"handle_f64to#{to}") do
1864    acc := send("f64to#{to}", acc.f64)
1865  end
1866end
1867
1868macro(:handle_mov_64) do |vd, vs|
1869  set_primitive(vd, vs).u64
1870end
1871
1872macro(:handle_mov_obj) do |vd, vs|
1873  set_object(vd, vs).ref
1874end
1875
1876macro(:handle_lda_64) do |vs|
1877  set_acc_primitive(vs)
1878end
1879
1880macro(:handle_sta_64_v8) do |vd|
1881  set_primitive(vd, acc.u64).u64
1882end
1883
1884macro(:handle_i32tof64) do
1885  acc := i32tof64(acc.i32)
1886end
1887
1888macro(:handle_fmovi_v8_imm) do |vd, imm|
1889  set_primitive(vd, imm).f32
1890end
1891
1892macro(:handle_fmovi_64_v8_imm) do |vd, imm|
1893  set_primitive(vd, imm).f64
1894end
1895
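# Resolves the callee for call / call.virt / initobj handlers. For non-initobj calls the
# receiver is null-checked when the callee is not static, and virtual calls additionally
# resolve the actual target through the ResolveVirtualMethod runtime entrypoint.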
1896macro(:get_callee) do |id, is_virt, is_initobj, v, imm = nil|
1897  update_bytecode_offset
1898  if is_initobj
1899    callee := cache_entry(id, false, false, :ptr, nil)
1900  else
1901    callee := callee_ptr(id, true)
1902    acc := Phi(acc, acc_restored).send(acc.type)
1903    If(callee, 0).EQ.Unlikely {
1904      move_to_exception
1905    }
1906  end
1907  if !is_initobj
1908    method_flags := LoadI(callee).Imm(Constants::METHOD_ACCESS_FLAGS_OFFSET).Volatile(true).u32
1909    If(AndI(method_flags).Imm("ark::ACC_STATIC").u32, 0).EQ.Unlikely {
1910      receiver = get_receiver(v, imm)
1911      receiver_word := Bitcast(receiver).SrcType("DataType::POINTER").word
1912      receiver_ref = Cast(receiver_word).SrcType(Options.arch_64_bits? ? "DataType::UINT64" : "DataType::UINT32").ref_uint
1913      If(receiver_ref, 0).EQ.Unlikely {
1914        call_runtime("ThrowNullPointerExceptionFromInterpreter").void
1915        move_to_exception
1916      }
1917      if is_virt
1918        callee_virt := call_runtime("ResolveVirtualMethod", callee, %frame, receiver_ref, %pc, method_ptr).ptr
1919      else
1920        callee_virt := callee
1921      end
    }
    Phi(callee, callee_virt).ptr
1924  else
1925    callee
1926  end
1927end
1928
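# Call handler generators: each variant resolves the callee and passes generic_call (or
# initobj_call) a lambda that copies the argument registers, and for the *.acc forms the
# accumulator, into the new frame.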
1929['initobj', 'call', 'call_virt'].each do |op|
1930  macro(:"handle_#{op}_short_v4_v4_id16") do |v1, v2, id, size|
1931    is_initobj = (op == 'initobj')
1932    callee := get_callee(id, op.include?('virt'), is_initobj, v1)
1933    copy_lambda := lambda { |new_frame, num_vregs, _, new_moffset|
1934      copy_reg(new_frame, num_vregs, v1, new_moffset)
1935      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
1936    }
1937    if is_initobj
1938      initobj_call(id, size, callee, 2, copy_lambda, 0, v1)
1939    else
1940      generic_call(id, size, is_initobj, callee, 2, copy_lambda)
1941    end
1942  end
1943end
1944
1945['call', 'call_virt'].each do |op|
1946  macro(:"handle_#{op}_acc_short_v4_imm4_id16") do |v, imm, id, size|
1947    callee := get_callee(id, op.include?('virt'), false, v, imm)
1948    generic_call(id, size, false, callee, 2, lambda do |new_frame, num_vregs, _, new_moffset|
1949      If(imm, 0).EQ {
1950        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, num_vregs), new_moffset)
1951        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v, new_moffset)
1952      } Else {
1953        copy_reg(new_frame, num_vregs, v, new_moffset)
1954        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), new_moffset)
1955      }
1956    end)
1957  end
1958end
1959
1960['call', 'call_virt'].each do |op|
1961  macro(:"handle_#{op}_acc_v4_v4_v4_imm4_id16") do |v1, v2, v3, imm, id, size|
1962    callee := get_callee(id, op.include?('virt'), false, v1, imm)
1963    generic_call(id, size, false, callee, 4, lambda do |new_frame, num_vregs, _, new_moffset|
1964      If(imm, 0).EQ {
1965        copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, num_vregs), new_moffset)
1966        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v1, new_moffset)
1967        copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2, new_moffset)
1968        copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
1969      } Else {
1970        If(imm, 1).EQ {
1971          copy_reg(new_frame, num_vregs, v1, new_moffset)
1972          copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), new_moffset)
1973          copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2, new_moffset)
1974          copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
1975        } Else {
1976          If(imm, 2).EQ {
1977            copy_reg(new_frame, num_vregs, v1, new_moffset)
1978            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
1979            copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(2).word), new_moffset)
1980            copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3, new_moffset)
1981          } Else {
1982            # TODO(mbolshov): assert imm==3
1983            copy_reg(new_frame, num_vregs, v1, new_moffset)
1984            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
1985            copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3, new_moffset)
1986            copy_acc_to_reg(new_frame, frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(3).word), new_moffset)
1987          }
1988        }
1989      }
1990    end)
1991  end
1992end
1993
1994['initobj', 'call', 'call_virt'].each do |op|
1995  macro(:"handle_#{op}_v4_v4_v4_v4_id16") do |v1, v2, v3, v4, id, size|
1996    is_initobj = (op == 'initobj')
1997    callee := get_callee(id, op.include?('virt'), is_initobj, v1)
1998    copy_lambda := lambda { |new_frame, num_vregs, _, new_moffset|
1999      copy_reg(new_frame, num_vregs, v1, new_moffset)
2000      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2, new_moffset)
2001      copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3, new_moffset)
2002      copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v4, new_moffset)
2003    }
2004    if is_initobj
2005      initobj_call(id, size, callee, 4, copy_lambda, 1, v1)
2006    else
2007      generic_call(id, size, false, callee, 4, copy_lambda)
2008    end
2009  end
2010end
2011
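# Range calls copy num_args consecutive virtual registers starting at v into the new frame
# using an explicit Head/Exit label loop.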
2012['initobj', 'call', 'call_virt'].each do |op|
2013  macro(:"handle_#{op}_range_v8_id16") do |v, id, size|
2014    is_initobj = (op == 'initobj')
2015    callee := get_callee(id, op.include?('virt'), is_initobj, v)
2016    copy_lambda := lambda { |new_frame, num_vregs, num_args, new_moffset|
2017      dst_ptr_0 := frame_vreg_ptr(new_frame, num_vregs)
2018      src_ptr_0 := vreg_ptr(v)
2019      i0 := 0
2020      Label(:Head)  # TODO(mbolshov): use While loops when they are ready
2021      i := Phi(i0, i1).word
2022      If(i, num_args).EQ.Unlikely do
2023        Goto(:Exit)
2024      end
2025      offset := Mul(i, Constants::VREGISTER_SIZE).word
2026      dst_ptr := Add(dst_ptr_0, offset).ptr
2027      src_ptr := Add(src_ptr_0, offset).ptr
2028      set_value(dst_ptr, get_value(src_ptr).i64)
2029      set_tag_frame(new_frame, dst_ptr, get_tag(src_ptr), new_moffset)
2030      i1 := Add(i, 1).word
2031      Goto(:Head)
2032      Label(:Exit)
2033    }
2034    if is_initobj
2035      initobj_call(id, size, callee, nil, copy_lambda, 2, v)
2036    else
2037      generic_call(id, size, false, callee, nil, copy_lambda)
2038    end
2039  end
2040end
2041
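# return / return.64 / return.obj copy the accumulator into the caller frame's acc slot.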
2042[:handle_return, :handle_return_64, :handle_return_obj].each do |handler|
2043  macro(handler) do
2044    generic_return(lambda { |prev_frame, _| copy_acc(acc_ptr_frame(prev_frame)) })
2045  end
2046end
2047
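# Fake return: if the current frame is not stackless, control leaves the interpreter through
# the INTERPRETER_RETURN intrinsic. Otherwise the previous frame is restored in place: the
# accumulator is reloaded (for initobj frames it is taken from the previous frame's acc slot
# and tagged as an object), the current frame is freed, and a pending exception, if any,
# redirects control to the exception path.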
2048macro(:handle_fake_return) do
2049    frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word
2050
2051    If(And(frame_flags, "Frame::IS_STACKLESS").word, 0).EQ.Unlikely {
2052      Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
2053    }
2054
2055    fake_frame := LoadI(%frame).Imm(Constants::FRAME_PREV_FRAME_OFFSET).ptr
2056    if Options.arm64?
2057      fake_moffset := get_moffset_frame(fake_frame)
2058      fake_method_ptr := get_method_ptr_frame(fake_frame)
2059    end
2060    fake_pc := LoadI(fake_frame).Imm(Constants::FRAME_NEXT_INSTRUCTION_OFFSET).ptr
2061
2062    If(And(frame_flags, "Frame::IS_INITOBJ").word, 0).NE.Unlikely {
2063      fake_acc_initobj := LoadI(acc_ptr_frame(fake_frame)).Imm(0).send(acc.type)
2064      fake_acc_tag_initobj := Constants::OBJECT_TAG
2065    } Else {
2066      fake_acc_general := restore_acc().send(acc.type)
2067      fake_acc_tag_general := restore_acc_tag()
2068    }
2069    fake_acc := Phi(fake_acc_initobj, fake_acc_general).send(acc.type)
2070    fake_acc_tag := Phi(fake_acc_tag_initobj, fake_acc_tag_general).i64
2071    StoreI(%tr, fake_frame).Imm(Constants::THREAD_FRAME_OFFSET).ptr
2072    call_runtime("FreeFrameInterp", frame, %tr).void
2073
2074    If(exception_val(), 0).NE.Unlikely {
2075      frame := fake_frame
2076      fake_frame_insts := LoadI(fake_frame).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
2077      fake_frame_bc_offset := LoadI(fake_frame).Imm(Constants::FRAME_BYTECODE_OFFSET).u64
2078      pc := Add(fake_frame_insts, fake_frame_bc_offset).ptr
2079      move_to_exception
2080    }
2081end
2082
2083macro(:handle_return_void) do
2084  generic_return(lambda { |prev_frame, cur_frame_flags|
2085    If(And(cur_frame_flags, "Frame::IS_INITOBJ").word, 0).NE.Unlikely do
2086      acc_obj := LoadI(acc_ptr_frame(prev_frame)).Imm(0).send(acc.type)
2087      acc_tag_obj := Constants::OBJECT_TAG
2088    end
2089    load_to_acc_reg(Phi(acc, acc_obj).send(acc.type), Phi(acc_tag.i64, acc_tag_obj).i64)
2090  })
2091end
2092
2093include_plugin 'interpreter_handlers'
2094
2095# Functions:
2096
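# Interpreter entry point: loads the fixed registers required by the internal handler calling
# convention (thread, frame, acc/acc_tag and, on arm64, moffset/method_ptr) and dispatches to
# the handler of the instruction at pc.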
2097function(:ExecuteImplFast,
2098         params: { 'tr' => 'ptr', 'pc' => 'ptr', 'frame' => 'ptr', 'dispatch_table' => 'ptr' },
2099         regmap: handler_regmap,
2100         regalloc_set: $panda_mask,
2101         mode: [:InterpreterEntry],
2102         validate: InterpreterValidation) do
2103  # Arm32 is not supported
2104  if Options.arch == :arm32
2105    Intrinsic(:UNREACHABLE).Terminator.void
2106    next
2107  end
  # Set up registers according to the internal interpreter calling convention:
2109  LiveOut(tr).DstReg(regmap[:tr]).ptr
2110  LiveOut(frame).DstReg(regmap[:frame]).ptr
2111  if Options.arm64?
2112    moffset := get_moffset_frame(frame)
2113    method_ptr := get_method_ptr_frame(frame)
2114    LiveOut(moffset).DstReg(regmap[:moffset]).word
2115    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
2116  end
2117
  # To prevent failures during frame verification while acc is not yet initialized
2119  acc := LoadI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
2120  acc_tag := LoadI(AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
2121
2122  LiveOut(acc).DstReg(regmap[:acc]).ptr
2123  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr
2124
2125  dispatch(dispatch_table, pc)
2126end
2127
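# Entry point used when an exception is already pending: it sets up the same live-out
# registers as ExecuteImplFast, but instead of dispatching on the current instruction it
# tail-calls the slot placed right after the regular handlers in the dispatch table
# (expected to hold the exception handler).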
2128function(:ExecuteImplFastEH,
2129         params: { 'tr' => 'ptr', 'pc' => 'ptr', 'frame' => 'ptr', 'dispatch_table' => 'ptr' },
2130         regmap: handler_regmap,
2131         regalloc_set: $panda_mask,
2132         mode: [:InterpreterEntry],
2133         validate: InterpreterValidation) do
2134  # Arm32 is not supported
2135  if Options.arch == :arm32
2136    Intrinsic(:UNREACHABLE).Terminator.void
2137    next
2138  end
  # Set up registers according to the internal interpreter calling convention:
2140  LiveOut(tr).DstReg(regmap[:tr]).ptr
2141  LiveOut(frame).DstReg(regmap[:frame]).ptr
2142  if Options.arm64?
2143    moffset := get_moffset_frame(frame)
2144    method_ptr := get_method_ptr_frame(frame)
2145    LiveOut(moffset).DstReg(regmap[:moffset]).word
2146    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
2147  end
2148
  # To prevent failures during frame verification while acc is not yet initialized
2150  acc := LoadI(frame).Imm(Constants::GET_ACC_OFFSET).ptr
2151  acc_tag := LoadI(AddI(frame).Imm(Constants::GET_ACC_OFFSET).ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
2152
2153  LiveOut(acc).DstReg(regmap[:acc]).ptr
2154  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr
2155
2156  LiveOut(pc).DstReg(regmap[:pc]).ptr
2157  LiveOut(dispatch_table).DstReg(regmap[:dispatch]).ptr
2158  addr := Load(dispatch_table, Panda::dispatch_table.handler_names.size * 8).ptr
2159  tail_call(addr)
2160end
2161
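# Generate one HANDLE_FAST_* handler per bytecode instruction. Each handler converts the
# live-in registers to typed values, verifies accumulator/register tags in debug builds,
# runs the instruction-specific macro selected below, advances pc for non-jump/call/return
# instructions and dispatches to the next handler.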
2162Panda.instructions.each do |i|
2163  op = i.operands # alias for brevity
2164  mode = [:Interpreter]
2165  mode.push(:DynamicMethod, :DynamicStub) if i.properties.include?('dynamic')
2166  lang =  i.namespace == 'core' ? 'PANDA_ASSEMBLY' : i.namespace.upcase
2167
  # Remove the profile part from the handler name, so that handler names do not have to be
  # adjusted each time profile info is added for an instruction.
2170  handler_name = i.handler_name.gsub(/_PROF\d+/, '')
2171
2172  function("HANDLE_FAST_#{handler_name}",
2173           regmap: handler_regmap,
2174           regalloc_set: $panda_mask,
2175           mode: mode,
2176           lang: lang,
2177           validate: InterpreterValidation) do
2178    # Arm32 is not supported
2179    if Options.arch == :arm32
2180      Intrinsic(:UNREACHABLE).Terminator.void
2181      next
2182    end
2183    src_acc_type = i.acc_and_operands.select(&:src?).select(&:acc?).first&.type&.to_sym
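    # acc_type_map normalizes ISA accumulator types to the register types used when acc is
    # passed between handlers; storage_type_map (below) gives the raw types used when the
    # incoming acc value is read or spilled.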
2184    acc_type_map = {
2185      :b32 => :u32,
2186      :b64 => :u64,
2187      :u1 => :u32,
2188      :u8 => :u32,
2189      :u16 => :u32,
2190      :i8 => :i32,
2191      :i16 => :i32,
2192      :any => :u64,
2193      :top => :ptr
2194    }
2195    storage_type_map = {
2196      :f32 => :u32,
2197      :f64 => :u64,
2198      :b32 => :u32,
2199      :b64 => :u64,
2200      :u1 => :u8,
2201      :any => :u64,
2202      :top => :ptr
2203    }
2204    acc_src_storage_type = storage_type_map[src_acc_type] || src_acc_type || :ptr
2205    if i.properties.include?('dynamic')  # investigate and remove this if-clause
2206      save_acc().send(acc_src_storage_type)
2207    end
2208    if defines.DEBUG
2209      call_runtime("DebugPrintEntrypoint", %frame, %pc, %acc, %acc_tag).void
2210    end
2211    if src_acc_type == :f32
2212      acc := Bitcast(%acc.u32).SrcType("DataType::UINT32").f32
2213    elsif src_acc_type == :f64
2214      acc := Bitcast(%acc.u64).SrcType("DataType::UINT64").f64
2215    else
2216      acc := %acc.send(acc_src_storage_type)
2217    end
2218
2219    acc_tag := (%acc_tag).sword
2220    pc := %pc
2221    table := %dispatch
2222    frame := %frame
2223    if Options.arm64?
2224      moffset := (%moffset).word
2225      method_ptr := %method_ptr
2226    end
2227    tr := %tr
2228
2229    if defines.DEBUG
2230      if !i.properties.include?('dynamic')
2231        i.acc_and_operands.each do |o|
2232          if o.dst? && !o.src?
2233            next
2234          end
2235          if o.acc?
2236            if o.type == "ref" || (o.type.include? "[]")
2237              assert_has_object_eq(acc_tag.u64)
2238            elsif ([o.type] & ['none', 'top', 'any']).empty?
2239              assert_has_object_ne(acc_tag.u64)
2240            end
2241          elsif o.reg?
            # No need to check the virtual register tag for mov.obj when the frame was produced by deoptimization:
            #   newobj v1, #some_record#
            #      ...
            #   mov.obj v2, v1
            #   mov.obj v2, v3
            # The object in v1 is dead after the first "mov.obj" (it is not used anywhere later in the method).
            # The "mov" instruction does not exist in the compiler, so for the compiler the object in v1 dies even
            # earlier and is not recorded in the nearest SaveState above the mov. If deoptimization happens, the
            # value of v1 in the interpreter frame is stale, so the tag assert emitted below would fail on the
            # first mov.obj. That is harmless, because the object is never used afterwards, hence the check is
            # skipped for deoptimized frames.
2251
2252            if handler_name.start_with? "MOV_OBJ"
2253              frame_flags := LoadI(%frame).Imm(Constants::FRAME_FLAGS_OFFSET).word
2254              If(And(frame_flags, "Frame::IS_DEOPTIMIZED").word, 0).NE.Unlikely {
2255                Goto(:SkipCheck)
2256              }
2257            end
2258            if o.type == "ref" || (o.type.include? "[]")
2259              assert_has_object_eq(get_tag(vreg_ptr(o)))
2260            elsif ([o.type] & ['none', 'top', 'any']).empty?
2261              assert_has_object_ne(get_tag(vreg_ptr(o)))
2262            end
2263            Label(:SkipCheck)
2264          end
2265        end
2266      end
2267    end
2268
2269    case handler_name
2270    when "NOP"
2271    # mov
2272    when "MOVI_V4_IMM4", "MOVI_V8_IMM8"
2273      handle_movi(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
2274    when "MOVI_V8_IMM16"
2275      handle_movi(vreg_ptr(op[0]), i16toi32(as_imm(op[1])))
2276    when "MOVI_V8_IMM32"
2277      handle_movi(vreg_ptr(op[0]), as_imm(op[1]))
2278    when "MOVI_64_V8_IMM64"
2279      handle_movi_64(vreg_ptr(op[0]), as_imm(op[1]))
2280    when "MOV_V4_V4", "MOV_V8_V8", "MOV_V16_V16"
2281      handle_mov(vreg_ptr(op[0]), vreg_value(op[1]).u32)
2282    when "MOV_64_V4_V4", "MOV_64_V16_V16"
2283      handle_mov_64(vreg_ptr(op[0]), vreg_value(op[1]).u64)
2284    when "MOV_OBJ_V4_V4", "MOV_OBJ_V8_V8", "MOV_OBJ_V16_V16"
2285      handle_mov_obj(vreg_ptr(op[0]), vreg_value(op[1]).ref)
2286    when "MOV_NULL_V8"
2287      handle_mov_null_v8(vreg_ptr(op[0]))
2288    when "FMOVI_PREF_V8_IMM32"
2289      handle_fmovi_v8_imm(vreg_ptr(op[0]), as_imm(op[1]))
2290    when "FMOVI_64_V8_IMM64"
2291      handle_fmovi_64_v8_imm(vreg_ptr(op[0]), as_imm(op[1]).f64)
2292    # lda
2293    when "LDA_V8"
2294      handle_lda(vreg_value(op[0]).u32)
2295    when "LDA_64_V8"
2296      handle_lda_64(vreg_value(op[0]).u64)
2297    when "LDA_OBJ_V8"
2298      handle_lda_obj_v8(vreg_value(op[0]).ref)
2299    when "LDA_STR_ID32"
2300      handle_lda_str_id32(as_id(op[0]))
2301    when "LDA_TYPE_ID16"
2302      handle_lda_type_id16(as_id(op[0]))
2303    when "LDA_CONST_V8_ID16"
2304      handle_lda_const_v8_id16(vreg_ptr(op[0]), as_id(op[1]))
2305    when "LDAI_IMM8"
2306      handle_ldai_imm(i8toi32(as_imm(op[0])))
2307    when "LDAI_IMM16"
2308      handle_ldai_imm(i16toi32(as_imm(op[0])))
2309    when "LDAI_IMM32"
2310      handle_ldai_imm(as_imm(op[0]))
2311    when "LDAI_64_IMM64"
2312      handle_ldai_64_imm(as_imm(op[0]))
2313    when "FLDAI_PREF_IMM32"
2314      handle_fldai_imm(as_imm(op[0]))
2315    when "FLDAI_64_IMM64"
2316      handle_fldai_64_imm(as_imm(op[0]))
2317    when "LDA_NULL"
2318      handle_lda_null()
2319    when "LENARR_V8"
2320      handle_lenarr_v8(vreg_value(op[0]).ref)
2321    when "LDARR_V8"
2322      handle_ldarr_v8(vreg_value(op[0]).ref)
2323    when "LDARR_8_V8"
2324      handle_ldarr_8_v8(vreg_value(op[0]).ref)
2325    when "LDARR_16_V8"
2326      handle_ldarr_16_v8(vreg_value(op[0]).ref)
2327    when "LDARRU_8_V8"
2328      handle_ldarru_8_v8(vreg_value(op[0]).ref)
2329    when "LDARRU_16_V8"
2330      handle_ldarru_16_v8(vreg_value(op[0]).ref)
2331    when "LDARR_64_V8"
2332      handle_ldarr_64_v8(vreg_value(op[0]).ref)
2333    when "FLDARR_32_V8"
2334      handle_fldarr_32_v8(vreg_value(op[0]).ref)
2335    when "FLDARR_64_V8"
2336      handle_fldarr_64_v8(vreg_value(op[0]).ref)
2337    when "LDARR_OBJ_V8"
2338      handle_ldarr_obj_v8(vreg_value(op[0]).ref)
2339    when "LDOBJ_V8_ID16"
2340      handle_ldobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2341    when "LDOBJ_V_V4_V4_ID16"
2342      handle_ldobj_v_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2343    when "LDOBJ_64_V8_ID16"
2344      handle_ldobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2345    when "LDOBJ_V_64_V4_V4_ID16"
2346      handle_ldobj_v_64_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2347    when "LDOBJ_OBJ_V8_ID16"
2348      handle_ldobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2349    when "LDOBJ_V_OBJ_V4_V4_ID16"
2350      handle_ldobj_v_obj_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
2351    when "LDSTATIC_ID16"
2352      handle_ldstatic_id16(as_id(op[0]))
2353    when "LDSTATIC_64_ID16"
2354      handle_ldstatic_64_id16(as_id(op[0]))
2355    when "LDSTATIC_OBJ_ID16"
2356      handle_ldstatic_obj_id16(as_id(op[0]))
2357    # sta
2358    when "STA_V8"
2359      handle_sta_v8(vreg_ptr(op[0]))
2360    when "STA_64_V8"
2361      handle_sta_64_v8(vreg_ptr(op[0]))
2362    when "STA_OBJ_V8"
2363      handle_sta_obj_v8(vreg_ptr(op[0]))
2364    when "STARR_V4_V4"
2365      handle_starr_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2366    when "STARR_8_V4_V4"
2367      handle_starr_8_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2368    when "STARR_16_V4_V4"
2369      handle_starr_16_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2370    when "STARR_64_V4_V4"
2371      handle_starr_64_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2372    when "FSTARR_32_V4_V4"
2373      handle_fstarr_32_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2374    when "FSTARR_64_V4_V4"
2375      handle_fstarr_64_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2376    when "STARR_OBJ_V4_V4"
2377      handle_starr_obj_v4_v4(vreg_value(op[0]).ref, vreg_value(op[1]).i32)
2378    when "STOBJ_V8_ID16"
2379      handle_stobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2380    when "STOBJ_64_V8_ID16"
2381      handle_stobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2382    when "STOBJ_OBJ_V8_ID16"
2383      handle_stobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
2384    when "STOBJ_V_V4_V4_ID16"
2385      handle_stobj_v_v4_v4_id16(vreg_value(op[0]).u32, vreg_value(op[1]).ref, as_id(op[2]))
2386    when "STOBJ_V_64_V4_V4_ID16"
2387      handle_stobj_v_64_v4_v4_id16(vreg_value(op[0]).u64, vreg_value(op[1]).ref, as_id(op[2]))
2388    when "STOBJ_V_OBJ_V4_V4_ID16"
2389      handle_stobj_v_obj_v4_v4_id16(vreg_value(op[0]).ref, vreg_value(op[1]).ref, as_id(op[2]))
2390    when "STSTATIC_ID16"
2391      handle_ststatic_id16(as_id(op[0]))
2392    when "STSTATIC_64_ID16"
2393      handle_ststatic_64_id16(as_id(op[0]))
2394    when "STSTATIC_OBJ_ID16"
2395      handle_ststatic_obj_id16(as_id(op[0]))
2396    # jmp
2397    when "JMP_IMM8"
2398      pc := handle_jmp_imm(pc, i8toi32(as_imm(op[0])))
2399    when "JMP_IMM16"
2400      pc := handle_jmp_imm(pc, i16toi32(as_imm(op[0])))
2401    when "JMP_IMM32"
2402      pc := handle_jmp_imm(pc, as_imm(op[0]))
2403    # conditional jumps
    # NB: better not to load the jump offset when the condition is false
2405    when "JEQ_V8_IMM8"
2406      pc := handle_jeq_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2407    when "JEQ_V8_IMM16"
2408      pc := handle_jeq_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2409    when "JNE_V8_IMM8"
2410      pc := handle_jne_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2411    when "JNE_V8_IMM16"
2412      pc := handle_jne_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2413    when "JLT_V8_IMM8"
2414      pc := handle_jlt_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2415    when "JLT_V8_IMM16"
2416      pc := handle_jlt_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2417    when "JGT_V8_IMM8"
2418      pc := handle_jgt_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2419    when "JGT_V8_IMM16"
2420      pc := handle_jgt_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2421    when "JLE_V8_IMM8"
2422      pc := handle_jle_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2423    when "JLE_V8_IMM16"
2424      pc := handle_jle_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2425    when "JGE_V8_IMM8"
2426      pc := handle_jge_v8_imm8(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2427    when "JGE_V8_IMM16"
2428      pc := handle_jge_v8_imm16(pc, vreg_value(op[0]).i32, as_imm(op[1]), i.format.size)
2429    when "JEQZ_IMM8"
2430      pc := handle_jeqz_imm8(pc, as_imm(op[0]), i.format.size)
2431    when "JEQZ_IMM16"
2432      pc := handle_jeqz_imm16(pc, as_imm(op[0]), i.format.size)
2433    when "JNEZ_IMM8"
2434      pc := handle_jnez_imm8(pc, as_imm(op[0]), i.format.size)
2435    when "JNEZ_IMM16"
2436      pc := handle_jnez_imm16(pc, as_imm(op[0]), i.format.size)
2437    when "JLTZ_IMM8"
2438      pc := handle_jltz_imm8(pc, as_imm(op[0]), i.format.size)
2439    when "JLTZ_IMM16"
2440      pc := handle_jltz_imm16(pc, as_imm(op[0]), i.format.size)
2441    when "JGTZ_IMM8"
2442      pc := handle_jgtz_imm8(pc, as_imm(op[0]), i.format.size)
2443    when "JGTZ_IMM16"
2444      pc := handle_jgtz_imm16(pc, as_imm(op[0]), i.format.size)
2445    when "JLEZ_IMM8"
2446      pc := handle_jlez_imm8(pc, as_imm(op[0]), i.format.size)
2447    when "JLEZ_IMM16"
2448      pc := handle_jlez_imm16(pc, as_imm(op[0]), i.format.size)
2449    when "JGEZ_IMM8"
2450      pc := handle_jgez_imm8(pc, as_imm(op[0]), i.format.size)
2451    when "JGEZ_IMM16"
2452      pc := handle_jgez_imm16(pc, as_imm(op[0]), i.format.size)
2453    when "JNEZ_OBJ_IMM8"
2454      pc := handle_jnez_obj_imm8(pc, as_imm(op[0]), i.format.size)
2455    when "JNEZ_OBJ_IMM16"
2456      pc := handle_jnez_obj_imm16(pc, as_imm(op[0]), i.format.size)
2457    when "JEQZ_OBJ_IMM8"
2458      pc := handle_jeqz_obj_imm8(pc, as_imm(op[0]), i.format.size)
2459    when "JEQZ_OBJ_IMM16"
2460      pc := handle_jeqz_obj_imm16(pc, as_imm(op[0]), i.format.size)
2461    when "JNE_OBJ_V8_IMM8"
2462      pc := handle_jne_obj_v8_imm8(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2463    when "JNE_OBJ_V8_IMM16"
2464      pc := handle_jne_obj_v8_imm16(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2465    when "JEQ_OBJ_V8_IMM8"
2466      pc := handle_jeq_obj_v8_imm8(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2467    when "JEQ_OBJ_V8_IMM16"
2468      pc := handle_jeq_obj_v8_imm16(pc, vreg_value(op[0]).ref, as_imm(op[1]), i.format.size)
2469    # cmp
2470    when "FCMPG_PREF_V8"
2471      handle_fcmpg_v8(vreg_value(op[0]).f32)
2472    when "FCMPG_64_V8"
2473      handle_fcmpg_64_v8(vreg_value(op[0]).f64)
2474    when "FCMPL_PREF_V8"
2475      handle_fcmpl_v8(vreg_value(op[0]).f32)
2476    when "FCMPL_64_V8"
2477      handle_fcmpl_64_v8(vreg_value(op[0]).f64)
2478    when "UCMP_PREF_V8"
2479      handle_ucmp(acc.u32, vreg_value(op[0]).u32)
2480    when "UCMP_64_PREF_V8"
2481      handle_ucmp(acc.u64, vreg_value(op[0]).u64)
2482    when "CMP_64_V8"
2483      handle_cmp(acc.i64, vreg_value(op[0]).i64)
2484    # add
2485    when "ADD_V4_V4"
2486      handle_add_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2487    when "ADDV_V4_V4"
2488      handle_add_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2489    when "INCI_V4_IMM4"
2490      handle_inci_v4_imm4(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
2491    when "ADDI_IMM8"
2492      handle_addi_imm(i8toi32(as_imm(op[0])))
2493    when "ADDIV_V4_V4_IMM8"
2494      handle_addi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2495    when "ADD2_V8"
2496      handle_add2_v8(vreg_value(op[0]).i32)
2497    when "ADD2_64_V8"
2498      handle_add2_64_v8(vreg_value(op[0]).i64)
2499    when "FADD2_64_V8"
2500      handle_fadd2_64_v8(vreg_value(op[0]).f64)
2501    when "FADD2_PREF_V8"
2502      handle_fadd2_v8(vreg_value(op[0]).f32)
2503    when "ADD2V_V8_V8"
2504      handle_add2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2505    when "ADD2V_64_V8_V8"
2506      handle_add2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2507    when "FADD2V_64_V8_V8"
2508      handle_fadd2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2509    when "FADD2V_PREF_V8_V8"
2510      handle_fadd2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2511    # sub
2512    when "FSUB2_PREF_V8"
2513      handle_fsub2_v8(vreg_value(op[0]).f32)
2514    when "FSUB2V_PREF_V8_V8"
2515      handle_fsub2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2516    when "SUB_V4_V4"
2517      handle_sub_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2518    when "SUBV_V4_V4"
2519      handle_sub_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2520    when "SUB2_V8"
2521      handle_sub2_v8(vreg_value(op[0]).i32)
2522    when "SUB2_64_V8"
2523      handle_sub2_64_v8(vreg_value(op[0]).i64)
2524    when "SUB2V_V8_V8"
2525      handle_sub2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2526    when "SUB2V_64_V8_V8"
2527      handle_sub2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2528    when "SUBI_IMM8"
2529      handle_subi_imm(i8toi32(as_imm(op[0])))
2530    when "SUBIV_V4_V4_IMM8"
2531      handle_subi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2532    when "FSUB2_64_V8"
2533      handle_fsub2_64_v8(vreg_value(op[0]).f64)
    when "FSUB2V_64_V8_V8"
      handle_fsub2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2544    # mul
2545    when "MUL_V4_V4"
2546      handle_mul_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2547    when "MULV_V4_V4"
2548      handle_mul_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2549    when "MUL2_V8"
2550      handle_mul2_v8(vreg_value(op[0]).i32)
2551    when "FMUL2_PREF_V8"
2552      handle_fmul2_v8(vreg_value(op[0]).f32)
2553    when "MUL2_64_V8"
2554      handle_mul2_64_v8(vreg_value(op[0]).i64)
2555    when "MUL2V_V8_V8"
2556      handle_mul2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2557    when "FMUL2V_PREF_V8_V8"
2558      handle_fmul2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2559    when "MUL2V_64_V8_V8"
2560      handle_mul2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2561    when "MULI_IMM8"
2562      handle_muli_imm(i8toi32(as_imm(op[0])))
2563    when "MULIV_V4_V4_IMM8"
2564      handle_muli_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2565    when "FMUL2_64_V8"
2566      handle_fmul2_64_v8(vreg_value(op[0]).f64)
2567    when "FMUL2V_64_V8_V8"
2568      handle_fmul2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2569    # div
2570    when "FDIV2_PREF_V8"
2571      handle_fdiv2_v8(vreg_value(op[0]).f32)
2572    when "FDIV2_64_V8"
2573      handle_fdiv2_64_v8(vreg_value(op[0]).f64)
2574    when "FDIV2V_PREF_V8_V8"
2575      handle_fdiv2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2576    when "FDIV2V_64_V8_V8"
2577      handle_fdiv2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2578    when "DIV_V4_V4"
2579      handle_div_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2580    when "DIVV_V4_V4"
2581      handle_div_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2582    when "DIV2_V8"
2583      handle_div2_v8(vreg_value(op[0]).i32)
2584    when "DIV2V_V8_V8"
2585      handle_div2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2586    when "DIVI_IMM8"
2587      handle_divi_imm(i8toi32(as_imm(op[0])))
2588    when "DIVIV_V4_V4_IMM8"
2589      handle_divi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2590    when "DIV2_64_V8"
2591      handle_div2_64_v8(vreg_value(op[0]).i64)
2592    when "DIVU2_PREF_V8"
2593      handle_divu2_v8(vreg_value(op[0]).i32)
2594    when "DIVU2_64_PREF_V8"
2595      handle_divu2_64_v8(vreg_value(op[0]).i64)
2596    when "DIV2V_64_V8_V8"
2597      handle_div2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2598    when "DIVU2V_PREF_V8_V8"
2599      handle_divu2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2600    when "DIVU2V_64_PREF_V8_V8"
2601      handle_divu2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2602    # mod
2603    when "FMOD2_PREF_V8"
2604      handle_fmod2_v8(vreg_value(op[0]).f32)
2605    when "FMOD2_64_V8"
2606      handle_fmod2_64_v8(vreg_value(op[0]).f64)
2607    when "FMOD2V_PREF_V8_V8"
2608      handle_fmod2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f32)
2609    when "FMOD2V_64_V8_V8"
2610      handle_fmod2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).f64)
2611    when "MOD_V4_V4"
2612      handle_mod_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2613    when "MODV_V4_V4"
2614      handle_mod_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2615    when "MOD2_V8"
2616      handle_mod2_v8(vreg_value(op[0]).i32)
2617    when "MOD2V_V8_V8"
2618      handle_mod2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2619    when "MODI_IMM8"
2620      handle_modi_imm(i8toi32(as_imm(op[0])))
2621    when "MODIV_V4_V4_IMM8"
2622      handle_modi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2623    when "MOD2_64_V8"
2624      handle_mod2_64_v8(vreg_value(op[0]).i64)
2625    when "MODU2_PREF_V8"
2626      handle_modu2_v8(vreg_value(op[0]).u32)
2627    when "MODU2_64_PREF_V8"
2628      handle_modu2_64_v8(vreg_value(op[0]).u64)
2629    when "MOD2V_64_V8_V8"
2630      handle_mod2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2631    when "MODU2V_PREF_V8_V8"
2632      handle_modu2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).u32)
2633    when "MODU2V_64_PREF_V8_V8"
2634      handle_modu2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).u64)
2635    # neg
2636    when "FNEG_64"
2637      handle_fneg_64()
2638    when "FNEG_PREF_NONE"
2639      handle_fneg()
2640    # and
2641    when "AND2_PREF_V8"
2642      handle_and2_v8(vreg_value(op[0]).i32)
2643    when "AND2_64_PREF_V8"
2644      handle_and2_64_v8(vreg_value(op[0]).i64)
2645    when "AND2V_PREF_V8_V8"
2646      handle_and2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2647    when "AND2V_64_PREF_V8_V8"
2648      handle_and2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2649    when "ANDI_IMM32"
2650      handle_andi_imm(as_imm(op[0]))
2651    when "ANDIV_V4_V4_IMM32"
2652      handle_andi_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2653    when "AND_PREF_V4_V4"
2654      handle_and_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2655    when "ANDV_PREF_V4_V4"
2656      handle_and_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2657    # or
2658    when "OR2_PREF_V8"
2659      handle_or2_v8(vreg_value(op[0]).i32)
2660    when "OR2_64_PREF_V8"
2661      handle_or2_64_v8(vreg_value(op[0]).i64)
2662    when "OR2V_PREF_V8_V8"
2663      handle_or2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2664    when "OR2V_64_PREF_V8_V8"
2665      handle_or2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2666    when "ORI_IMM32"
2667      handle_ori_imm(as_imm(op[0]))
2668    when "ORIV_V4_V4_IMM32"
2669      handle_ori_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2670    when "OR_PREF_V4_V4"
2671      handle_or_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2672    when "ORV_PREF_V4_V4"
2673      handle_or_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2674    # ashr
2675    when "ASHR2_PREF_V8"
2676      handle_ashr2_v8(vreg_value(op[0]).i32)
2677    when "ASHR2_64_PREF_V8"
2678      handle_ashr2_64_v8(vreg_value(op[0]).i64)
2679    when "ASHR2V_PREF_V8_V8"
2680      handle_ashr2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2681    when "ASHR2V_64_PREF_V8_V8"
2682      handle_ashr2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2683    when "ASHRI_IMM8"
2684      handle_ashri_imm(as_imm(op[0]))
2685    when "ASHRIV_V4_V4_IMM8"
2686      handle_ashri_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2687    when "ASHR_PREF_V4_V4"
2688      handle_ashr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2689    when "ASHRV_PREF_V4_V4"
2690      handle_ashr_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2691    # shr
2692    when "SHRI_IMM8"
2693      handle_shri_imm(i8toi32(as_imm(op[0])))
2694    when "SHRIV_V4_V4_IMM8"
2695      handle_shri_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2696    when "SHR_PREF_V4_V4"
2697      handle_shr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2698    when "SHRV_PREF_V4_V4"
2699      handle_shr_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2700    when "SHR2_PREF_V8"
2701      handle_shr2_v8(vreg_value(op[0]).i32)
2702    when "SHR2_64_PREF_V8"
2703      handle_shr2_64_v8(vreg_value(op[0]).i64)
2704    when "SHR2V_PREF_V8_V8"
2705      handle_shr2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2706    when "SHR2V_64_PREF_V8_V8"
2707      handle_shr2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2708    # xor
2709    when "XOR2_PREF_V8"
2710      handle_xor2_v8(vreg_value(op[0]).i32)
2711    when "XOR2_64_PREF_V8"
2712      handle_xor2_64_v8(vreg_value(op[0]).i64)
2713    when "XOR2V_PREF_V8_V8"
2714      handle_xor2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2715    when "XOR2V_64_PREF_V8_V8"
2716      handle_xor2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2717    when "XORI_PREF_IMM32"
2718      handle_xori_imm(as_imm(op[0]))
2719    when "XORIV_PREF_V4_V4_IMM32"
2720      handle_xori_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_imm(op[2]))
2721    when "XOR_PREF_V4_V4"
2722      handle_xor_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2723    when "XORV_PREF_V4_V4"
2724      handle_xor_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2725    # shl
2726    when "SHLI_IMM8"
2727      handle_shli_imm(i8toi32(as_imm(op[0])))
2728    when "SHLIV_V4_V4_IMM8"
2729      handle_shli_v4_v4_imm(vreg_ptr(op[0]), vreg_value(op[1]).i32, i8toi32(as_imm(op[2])))
2730    when "SHL_PREF_V4_V4"
2731      handle_shl_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
2732    when "SHLV_PREF_V4_V4"
2733      handle_shl_v_v4_v4(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2734    when "SHL2_PREF_V8"
2735      handle_shl2_v8(vreg_value(op[0]).i32)
2736    when "SHL2_64_PREF_V8"
2737      handle_shl2_64_v8(vreg_value(op[0]).i64)
2738    when "SHL2V_PREF_V8_V8"
2739      handle_shl2_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i32)
2740    when "SHL2V_64_PREF_V8_V8"
2741      handle_shl2_64_v8_v8(vreg_ptr(op[0]), vreg_value(op[1]).i64)
2742    # not
2743    when "NOT_PREF_NONE"
2744      handle_not()
2745    when "NOT_64_PREF_NONE"
2746      handle_not_64()
2747    # neg
2748    when "NEG"
2749      handle_neg()
2750    when "NEG_64"
2751      handle_neg_64()
2752    # new
2753    when "NEWARR_V4_V4_ID16"
2754      handle_newarr_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).i32, as_id(op[2]))
2755    when "NEWOBJ_V8_ID16"
2756      handle_newobj_v8_id16(vreg_ptr(op[0]), as_id(op[1]))
2757    # checks
2758    when "ISINSTANCE_ID16"
2759      handle_isinstance_id16(as_id(op[0]))
2760    when "CHECKCAST_ID16"
2761      handle_checkcast_id16(as_id(op[0]))
2762    # cast
2763    when "I32TOU1_PREF_NONE"
2764      handle_i32tou1()
2765    when "I64TOU1_PREF_NONE"
2766      handle_i64tou1()
2767    when "U32TOU1_PREF_NONE"
2768      handle_u32tou1()
2769    when "U64TOU1_PREF_NONE"
2770      handle_u64tou1()
2771    when "I32TOI64_PREF_NONE"
2772      handle_i32toi64()
2773    when "I32TOI16_PREF_NONE"
2774      handle_i32toi16()
2775    when "I32TOU16_PREF_NONE"
2776      handle_i32tou16()
2777    when "I32TOI8_PREF_NONE"
2778      handle_i32toi8()
2779    when "I32TOU8_PREF_NONE"
2780      handle_i32tou8()
2781    when "I64TOI32_PREF_NONE"
2782      handle_i64toi32()
2783    when "U32TOI64_PREF_NONE"
2784      handle_u32toi64()
2785    when "U32TOI16_PREF_NONE"
2786      handle_u32toi16()
2787    when "U32TOU16_PREF_NONE"
2788      handle_u32tou16()
2789    when "U32TOI8_PREF_NONE"
2790      handle_u32toi8()
2791    when "U32TOU8_PREF_NONE"
2792      handle_u32tou8()
2793    when "U64TOI32_PREF_NONE"
2794      handle_u64toi32()
2795    when "U64TOU32_PREF_NONE"
2796      handle_u64tou32()
2797    when "I32TOF32_PREF_NONE"
2798      handle_i32tof32()
2799    when "I32TOF64_PREF_NONE"
2800      handle_i32tof64()
2801    when "U32TOF32_PREF_NONE"
2802      handle_u32tof32()
2803    when "U32TOF64_PREF_NONE"
2804      handle_u32tof64()
2805    when "I64TOF32_PREF_NONE"
2806      handle_i64tof32()
2807    when "I64TOF64_PREF_NONE"
2808      handle_i64tof64()
2809    when "U64TOF32_PREF_NONE"
2810      handle_u64tof32()
2811    when "U64TOF64_PREF_NONE"
2812      handle_u64tof64()
2813    when "F32TOF64_PREF_NONE"
2814      handle_f32tof64()
2815    when "F32TOI32_PREF_NONE"
2816      handle_f32toi32()
2817    when "F32TOI64_PREF_NONE"
2818      handle_f32toi64()
2819    when "F32TOU32_PREF_NONE"
2820      handle_f32tou32()
2821    when "F32TOU64_PREF_NONE"
2822      handle_f32tou64()
2823    when "F64TOI32_PREF_NONE"
2824      handle_f64toi32()
2825    when "F64TOI64_PREF_NONE"
2826      handle_f64toi64()
2827    when "F64TOU32_PREF_NONE"
2828      handle_f64tou32()
2829    when "F64TOU64_PREF_NONE"
2830      handle_f64tou64()
2831    when "F64TOF32_PREF_NONE"
2832      handle_f64tof32()
2833    # call
2834    when "CALL_SHORT_V4_V4_ID16"
2835      handle_call_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2836    when "CALL_ACC_SHORT_V4_IMM4_ID16"
2837      handle_call_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
2838    when "CALL_ACC_V4_V4_V4_IMM4_ID16"
2839      handle_call_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
2840    when "CALL_V4_V4_V4_V4_ID16"
2841      handle_call_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2842    when "CALL_RANGE_V8_ID16"
2843      handle_call_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2844    when "CALL_VIRT_SHORT_V4_V4_ID16"
2845      handle_call_virt_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2846    when "CALL_VIRT_ACC_SHORT_V4_IMM4_ID16"
2847      handle_call_virt_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
2848    when "CALL_VIRT_V4_V4_V4_V4_ID16"
2849      handle_call_virt_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2850    when "CALL_VIRT_ACC_V4_V4_V4_IMM4_ID16"
2851      handle_call_virt_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
2852    when "CALL_VIRT_RANGE_V8_ID16"
2853      handle_call_virt_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2854    when "INITOBJ_SHORT_V4_V4_ID16"
2855      handle_initobj_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
2856    when "INITOBJ_V4_V4_V4_V4_ID16"
2857      handle_initobj_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
2858    when "INITOBJ_RANGE_V8_ID16"
2859      handle_initobj_range_v8_id16(op[1], as_id(op[0]), i.format.size)
2860    # return
2861    when "RETURN_VOID"
2862      handle_return_void()
2863    when "RETURN"
2864      handle_return()
2865    when "RETURN_64"
2866      handle_return_64()
2867    when "RETURN_OBJ"
2868      handle_return_obj()
2869    # dyn
2870    when "MOV_DYN_V8_V8"
2871      set_value(vreg_ptr(op[0]), vreg_value(op[1]).any).any
2872    when "STA_DYN_V8"
2873      set_value(vreg_ptr(op[0]), acc.any).any
2874    when "LDA_DYN_V8"
2875      acc := vreg_value(op[0]).any
2876    when "LDAI_DYN_IMM32"
2877      acc := i32toany(as_imm(op[0]).i32)
2878    when "FLDAI_DYN_IMM64"
2879      acc := f64toany(as_imm(op[0]).f64)
2880    # throw
2881    when "THROW_V8"
2882      handle_throw(vreg_value(op[0]).ref)
2883
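# Language plugins can contribute additional 'when' branches for their instructions here.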
2884include_plugin 'interpreter_main_loop'
2885
2886    else
2887      Intrinsic(:UNREACHABLE).Terminator.void
2888    end
2889
2890    if (i.properties & ['jump', 'call', 'return']).empty?
2891      if !i.exceptions.include?('x_throw')
2892        if i.exceptions.include?('x_ecma')
2893          If(exception_val(), 0).NE.Unlikely {
2894            pc_eh := find_catch_block()
2895          } Else {
2896            pc_inc := advance_pc_imm(pc, i.format.size)
2897          }
2898          frame := Phi(frame_eh, frame).ptr
2899          if Options.arm64?
2900            moffset := Phi(moffset_eh, moffset).word
2901            method_ptr := Phi(method_ptr_eh, method_ptr).ptr
2902          end
2903          pc := Phi(pc_eh, pc_inc).ptr
2904          acc := Phi(acc_eh.any, acc.any).any
2905        else
2906          pc := advance_pc_imm(pc, i.format.size)
2907        end
2908      end
2909    end
2910
2911    dst_acc_type = i.acc_and_operands.select(&:dst?).select(&:acc?).first&.type&.to_sym
2912    src_acc_type = i.acc_and_operands.select(&:src?).select(&:acc?).first&.type&.to_sym
2913
2914    acc_type = dst_acc_type || src_acc_type || :u64
2915
2916    acc_type = acc_type_map[acc_type] || acc_type
2917    if acc_type == :f32
2918      acc := Bitcast(acc.f32).SrcType("DataType::FLOAT32").u32
2919      acc_type = :u32
2920    elsif acc_type == :f64
2921      acc := Bitcast(acc.f64).SrcType("DataType::FLOAT64").u64
2922      acc_type = :u64
2923    end
2924    LiveOut(acc).DstReg(regmap[:acc]).send(acc_type)
    LiveOut(acc_tag).DstReg(regmap[:acc_tag]).ptr  # actually u64, but keep the type consistent with the LiveIn
2926    LiveOut(frame).DstReg(regmap[:frame]).ptr
2927    if Options.arm64?
2928      LiveOut(moffset).DstReg(regmap[:moffset]).word
2929      LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
2930    end
2931    LiveOut(tr).DstReg(regmap[:tr]).ptr
2932
2933    dispatch(table, pc)
2934  end
2935end
2936
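# Prefix handlers: read the secondary opcode that follows the prefix byte and tail-call the
# corresponding entry from the prefixed part of the dispatch table.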
2937Panda.prefixes.each do |p|
2938  function("HANDLE_FAST_#{p.handler_name}",
2939           regmap: handler_regmap,
2940           regalloc_set: $panda_mask,
2941           mode: [:Interpreter],
2942           validate: InterpreterValidation) do
2943    # Arm32 is not supported
2944    if Options.arch == :arm32
2945      Intrinsic(:UNREACHABLE).Terminator.void
2946      next
2947    end
2948    pc := %pc
2949    table := %dispatch
2950
2951    secondary_opcode := readbyte(pc, 1)
2952    offset_idx := AddI(u8toword(secondary_opcode)).Imm(Panda.dispatch_table.secondary_opcode_offset(p)).word
2953    offset := Mul(offset_idx, "WordSize()").word
2954    addr := Load(table, offset).ptr
2955
2956    LiveOut(%acc).DstReg(regmap[:acc]).u64
2957    LiveOut(%acc_tag).DstReg(regmap[:acc_tag]).u64
2958    LiveOut(pc).DstReg(regmap[:pc]).ptr
2959    LiveOut(table).DstReg(regmap[:dispatch]).ptr
2960    LiveOut(%frame).DstReg(regmap[:frame]).ptr
2961    if Options.arm64?
2962      LiveOut(%moffset).DstReg(regmap[:moffset]).word
2963      LiveOut(%method_ptr).DstReg(regmap[:method_ptr]).ptr
2964    end
2965    LiveOut(%tr).DstReg(regmap[:tr]).ptr
2966
2967    tail_call(addr)
2968  end
2969end
2970
2971function(:HANDLE_FAST_INVALID,
2972         regmap: handler_regmap,
2973         regalloc_set: $panda_mask,
2974         mode: [:Interpreter],
2975         validate: InterpreterValidation) do
2976  Intrinsic(:UNREACHABLE).Terminator.void
2977end
2978
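# Exception handler stub: locates the catch block for the pending exception, switches to the
# frame/pc provided by find_catch_block and resumes dispatch there.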
2979function(:HANDLE_FAST_EXCEPTION,
2980         regmap: handler_regmap,
2981         regalloc_set: $panda_mask,
2982         mode: [:Interpreter],
2983         validate: InterpreterValidation) do
2984  # Arm32 is not supported
2985  if Options.arch == :arm32
2986    Intrinsic(:UNREACHABLE).Terminator.void
2987    next
2988  end
2989  table := %dispatch
2990  pc := %pc
2991
  # A pending exception is expected at this point.
2993  pc := find_catch_block()
2994  frame := frame_eh
2995  if Options.arm64?
2996    moffset := moffset_eh
2997    method_ptr := method_ptr_eh
2998  end
2999  load_to_acc_reg(acc_eh, acc_tag_eh)
3000
3001  LiveOut(acc).DstReg(regmap[:acc]).u64
3002  LiveOut(acc_tag).DstReg(regmap[:acc_tag]).u64
3003  LiveOut(frame).DstReg(regmap[:frame]).ptr
3004  if Options.arm64?
3005    LiveOut(moffset).DstReg(regmap[:moffset]).word
3006    LiveOut(method_ptr).DstReg(regmap[:method_ptr]).ptr
3007  end
3008  LiveOut(%tr).DstReg(regmap[:tr]).ptr
3009
3010  dispatch(table, pc)
3011end
3012