1#!/usr/bin/env ruby
2
3# Copyright (c) 2021-2022 Huawei Device Co., Ltd.
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8# http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16include_relative 'common.irt'
17
18fixed_regmap = Regmap.new({
19  arm32: { dispatch: 12, pc: 4, frame: 8 },
20  arm64: { dispatch: 24, pc: 20, frame: 23 },
21  x86_64: { dispatch: 8, pc: 4, frame: 5 },
22})
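# Every handler keeps the dispatch table, the bytecode pc and the current frame
# in the fixed machine registers listed above (register numbers 24, 20 and 23
# on arm64) and hands them over to the next handler via LiveOut; handler_regmap
# below merges these pinned registers into the common interpreter register map.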
23handler_regmap = $full_regmap + fixed_regmap
24
def check_regmap(lhs, rhs, name)
  regs_intersection = lhs.data.values & rhs.data.values
  raise "Fixed register numbers should not intersect with '#{name}' registers" unless regs_intersection.empty?
end

if Options.arm64?  # other archs don't have enough regs
  # fixed registers assignment sanity checks:
  check_regmap(fixed_regmap, $panda_regmap, 'panda')
  check_regmap(fixed_regmap, $arch_regmap, 'arch')
  check_regmap(fixed_regmap, $args_regmap, 'args')
  check_regmap(fixed_regmap, $callers_regmap, 'caller')
end
37
38InterpreterValidation = {
39  spills_count_max: 12  # should be synced with SPILL_SLOTS in codegen_interpreter.h
40}
41
42# Macros:
43
44# Casts:
45
46['8', '16'].each do |from|
47  ['u32', 'u64'].each do |to|
48    macro(:"u#{from}to#{to}") do |arg|
49      Cast(arg).SrcType("DataType::UINT#{from}").send(:"#{to}")
50    end
51  end
52end
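# For instance, the loop above expands into macros equivalent to
#   macro(:u8tou32)  { |arg| Cast(arg).SrcType("DataType::UINT8").u32 }
#   macro(:u16tou64) { |arg| Cast(arg).SrcType("DataType::UINT16").u64 }
# so handlers can spell the widening casts as u8tou32(x) etc.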
53
54['8', '16'].each do |from|
55  macro(:"i#{from}toi32") do |arg|
56    Cast(arg).SrcType("DataType::INT#{from}").i32
57  end
58end
59
60[['u32', 'UINT32'], ['i32', 'INT32']].each do |from, from_type|
61  ['b', 'i8', 'u8', 'i16', 'u16', 'i64', 'u64'].each do |to|
62    macro(:"#{from}to#{to}") do |arg|
63      Cast(arg).SrcType("DataType::#{from_type}").send(:"#{to}")
64    end
65  end
66end
67
68['b', 'u32', 'i32'].each do |to|
69  macro(:"u64to#{to}") do |arg|
70    Cast(arg).SrcType("DataType::UINT64").send(:"#{to}")
71  end
72end
73
74['b', 'i32'].each do |to|
75  macro(:"i64to#{to}") do |arg|
76    Cast(arg).SrcType("DataType::INT64").send(:"#{to}")
77  end
78end
79
80[['u32', 'UINT32'], ['i32', 'INT32'], ['u64', 'UINT64'], ['i64', 'INT64']].each do |from, from_type|
81  ['f32', 'f64'].each do |to|
82    macro(:"#{from}to#{to}") do |arg|
83      Cast(arg).SrcType("DataType::#{from_type}").send(:"#{to}")
84    end
85  end
86end
87
88['f64', 'i32', 'u32', 'i64', 'u64'].each do |to|
89  macro(:"f32to#{to}") do |arg|
90    Cast(arg).SrcType("DataType::FLOAT32").send(:"#{to}")
91  end
92end
93
94['i32', 'u32', 'i64', 'u64', 'f32'].each do |to|
95  macro(:"f64to#{to}") do |arg|
96    Cast(arg).SrcType("DataType::FLOAT64").send(:"#{to}")
97  end
98end
99
100macro(:u8tou1) do |arg|
101  Cast(arg).SrcType("DataType::UINT8").b
102end
103
104macro(:u32tou1) do |arg|
105  Cast(arg).SrcType("DataType::UINT32").b
106end
107
108macro(:i8tou16) do |arg|
109  Cast(arg).SrcType("DataType::INT8").u16
110end
111
112macro(:i16tou16) do |arg|
113  Cast(arg).SrcType("DataType::INT16").u16
114end
115
116macro(:i16toi32) do |arg|
117  Cast(arg).SrcType("DataType::INT16").i32
118end
119
120macro(:i8tou32) do |arg|
121  Cast(arg).SrcType("DataType::INT8").u32
122end
123
124macro(:i16tou32) do |arg|
125  Cast(arg).SrcType("DataType::INT16").u32
126end
127
128macro(:i32tou32) do |arg|
129  Cast(arg).SrcType("DataType::INT32").u32
130end
131
132['u8', 'u16'].each do |from|
133  macro(:"#{from}toword") do |arg|
134    if Options.arch_64_bits?
135      send(:"#{from}tou64", arg)
136    else
137      send(:"#{from}tou32", arg)
138    end
139  end
140end
141
142macro(:u32toi32) do |arg|
143  Cast(arg).SrcType("DataType::UINT32").i32
144end
145
146macro(:i32tou64) do |arg|
147  Cast(arg).SrcType("DataType::INT32").u64
148end
149
150macro(:u32toword) do |arg|
151  if Options.arch_64_bits?
152    u32tou64(arg)
153  else
154    arg
155  end
156end
157
158macro(:i32tou8) do |arg|
159  Cast(arg).SrcType("DataType::INT32").u8
160end
161
166macro(:i32tof64) do |arg|
167  Cast(arg).SrcType("DataType::INT32").f64
168end
169
170macro(:u64tou32) do |arg|
171  Cast(arg).SrcType("DataType::UINT64").u32
172end
173
174macro(:i64tou8) do |arg|
175  Cast(arg).SrcType("DataType::INT64").u8
176end
177
178macro(:i64tou32) do |arg|
179  Cast(arg).SrcType("DataType::INT64").u32
180end
181
182macro(:f64tou32) do |arg|
183  Cast(arg).SrcType("DataType::FLOAT64").u32
184end
185
186macro(:f64toi64) do |arg|
187  Cast(arg).SrcType("DataType::FLOAT64").i64
188end
189
190macro(:i32tou1) do |arg|
191  res := AddI(0).Imm(0).b
192  If(arg, 0).CC(:CC_NE).b {
193    res_1 := AddI(res).Imm(1).b
194  }
195  Phi(res, res_1).b
196end
197
198macro(:i32toany) do |arg|
199  CastValueToAnyType(arg).AnyType(Constants::DYN_INT_TYPE).any
200end
201
202macro(:f64toany) do |arg|
203  CastValueToAnyType(arg).AnyType(Constants::DYN_DOUBLE_TYPE).any
204end
205
206# Decoding
207
208macro(:readbyte) do |pc, offset|
209  LoadI(pc).Imm(offset).u8
210end
211
212macro(:read_lower_4bits) do |offset|
213  if Options.arm64?
214    imm := readbyte(pc, offset).u32
215    AndI(imm).Imm(0xf).u8
216  else
217    imm := readbyte(pc, offset).u8
218    AndI(imm).Imm(0xf).u8
219  end
220end
221
macro(:signed_read_higher_4bits) do |offset|
  if Options.arm64?
    imm := readbyte(pc, offset).i32
    shl_imm := ShlI(imm).Imm(24).i32
    i32toi8(AShrI(shl_imm).Imm(28).i32)
  else
    imm := readbyte(pc, offset).i8
    AShrI(imm).Imm(4).i8
  end
end

macro(:read_higher_4bits) do |offset|
  if Options.arm64?
    imm := readbyte(pc, offset).u32
    shl_imm := ShlI(imm).Imm(24).u32
    u32tou8(ShrI(shl_imm).Imm(28).u32)
  else
    imm := readbyte(pc, offset).u8
    ShrI(imm).Imm(4).u8
  end
end
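# Worked example: for an operand byte 0xA3, read_lower_4bits yields 0x3,
# read_higher_4bits yields 0xA (10), and signed_read_higher_4bits yields -6,
# since the shift-left-24 / arithmetic-shift-right-28 sequence on arm64 is a
# 32-bit way of sign-extending the high nibble, matching the plain
# arithmetic-shift-by-4 used on the other targets.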
243
244macro(:as_vreg_idx) do |operand|
245  raise 'Register is expected' unless operand.reg?
246
247  offset = operand.offset / 8
248
249  case operand.width
250  when 4
251    u8toword(operand.offset % 8 != 0 ? read_higher_4bits(offset) : read_lower_4bits(offset))
252  when 8
253    u8toword(readbyte(pc, offset))
254  when 16
255    u16toword(readbyte(pc, offset).u16)
256  end
257end
258
259macro(:as_id) do |operand|
260  raise 'ID is expected' unless operand.id?
261
262  offset = operand.offset / 8
263
264  case operand.width
265  when 16
266    readbyte(pc, offset).u16
267  when 32
268    readbyte(pc, offset).u32
269  end
270end
271
macro(:as_imm) do |operand|
  raise 'Immediate is expected' unless operand.imm?

  offset = operand.offset / 8

  case operand.width
  when 4
    operand.offset % 8 != 0 ? signed_read_higher_4bits(offset) : read_lower_4bits(offset)
  when 8
    readbyte(pc, offset).i8
  when 16
    readbyte(pc, offset).i16
  when 32
    if operand.type == 'f32'
      readbyte(pc, offset).f32
    else
      readbyte(pc, offset).i32
    end
  when 64
    if operand.type == 'f64'
      readbyte(pc, offset).f64
    else
      readbyte(pc, offset).i64
    end
  end
end
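# Note that operand.offset and operand.width are plain Ruby values coming from
# the bytecode format description, so the case statements in as_vreg_idx,
# as_id and as_imm are resolved while the handler is generated; only the
# readbyte loads and casts for the chosen width end up in the emitted IR.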
298
299macro(:ins_offset) do ||
300  instructions_offset := LoadI(%frame).Imm(Constants::FRAME_INSTRUCTIONS_OFFSET).ptr
301  Sub(pc, instructions_offset).u32
302end
303
304# Register access:
305
306macro(:frame_vreg_ptr) do |frame, vreg_idx|
307  vreg_offset := AddI(Mul(vreg_idx, Constants::VREGISTER_SIZE).word).Imm(Constants::VREGISTERS_OFFSET).word
308  Add(frame, vreg_offset).ptr
309end
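# i.e. virtual register N lives at frame + VREGISTERS_OFFSET + N * VREGISTER_SIZE.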
310
311macro(:vreg_ptr) do |operand|
312  vreg_idx := as_vreg_idx(operand)
313  frame_vreg_ptr(%frame, vreg_idx)
314end
315
316macro(:acc_ptr_frame) do |frame|
317  Add(frame, Constants::GET_ACC_OFFSET).ptr
318end
319
320macro(:acc_ptr) do
321  acc_ptr_frame(%frame)
322end
323
324macro(:get_value) do |vreg_ptr|
325  LoadI(vreg_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET).u64
326end
327
328macro(:set_value) do |vreg_ptr, val|
329  StoreI(vreg_ptr, val).Imm(Constants::VREGISTER_VALUE_OFFSET).u64
330end
331
332macro(:get_tag) do |vreg_ptr|
333  vreg_num := LoadI(%frame).Imm(Constants::VREGISTERS_NUM_OFFSET).u32
334  vreg_mirror_ptr := Add(vreg_ptr, Mul(u32toword(vreg_num), Constants::VREGISTER_SIZE).word).ptr
335  LoadI(vreg_mirror_ptr).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
336end
337
338macro(:set_tag_frame) do |frame, vreg_ptr, tag|
339  vreg_num := LoadI(frame).Imm(Constants::VREGISTERS_NUM_OFFSET).u32
340  vreg_mirror_ptr := Add(vreg_ptr, Mul(u32toword(vreg_num), Constants::VREGISTER_SIZE).word).ptr
341  StoreI(vreg_mirror_ptr, tag).Imm(Constants::VREGISTER_VALUE_OFFSET).i64
342end
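# Tags are kept in a mirror area of the same layout placed right after the
# values, so the tag of a register is found (number of vregs in the frame) *
# VREGISTER_SIZE bytes past its value slot (see get_tag/set_tag_frame above).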
343
344macro(:set_tag) do |vreg_ptr, tag|
345  set_tag_frame(%frame, vreg_ptr, tag)
346end
347
348macro(:get_acc_tag) do |acc_ptr|
349  LoadI(acc_ptr).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
350end
351
352macro(:set_acc_tag) do |acc_ptr, tag|
353  StoreI(acc_ptr, tag).Imm(Constants::GET_ACC_MIRROR_OFFSET).i64
354end
355
356macro(:vreg_value) do |operand|
357  get_value(vreg_ptr(operand))
358end
359
360macro(:acc_value) do
361  get_value(acc_ptr)
362end
363
364macro(:set_primitive) do |v, value|
365  set_tag(v, 0x0)
366  set_value(v, value)
367end
368
369macro(:set_object) do |v, value|
370  set_tag(v, 0x1)
371  set_value(v, value)
372end
373
374macro(:set_acc_primitive) do |acc_ptr, value|
375  set_acc_tag(acc_ptr, 0x0)
376  set_value(acc_ptr, value)
377end
378
379macro(:set_acc_object) do |acc_ptr, value|
380  set_acc_tag(acc_ptr, 0x1)
381  set_value(acc_ptr, value)
382end
383
384macro(:has_object) do |v|
385  tag := get_tag(v)
386  is_object := AndI(tag).Imm(Constants::OBJECT_MASK).u64
387  IfImm(is_object).Imm(0).CC(:CC_NE).b {
388    has := 1
389  } Else {
390    has_not := 0
391  }
392  Phi(has, has_not).ptr
393end
394
395macro(:copy_acc) do |dst_ptr, src_ptr|
396  set_value(dst_ptr, get_value(src_ptr))
397  set_acc_tag(dst_ptr, get_acc_tag(src_ptr))
398end
399
400macro(:copy_reg) do |new_frame, dst_idx, src_operand|
401  dst_reg_ptr = frame_vreg_ptr(new_frame, dst_idx)
402  src_reg_ptr = vreg_ptr(src_operand)
403  set_value(dst_reg_ptr, get_value(src_reg_ptr))
404  set_tag_frame(new_frame, dst_reg_ptr, get_tag(src_reg_ptr))
405end
406
407# Helper macros:
408
409macro(:dispatch) do |table, pc|
410  opc := readbyte(pc, 0)
411  offset := Mul(u8toword(opc), "WordSize()").word
412  addr := Load(table, offset).ptr
413  LiveOut(pc).DstReg(regmap[:pc]).ptr
414  LiveOut(table).DstReg(regmap[:dispatch]).ptr
415  IndirectJump(addr)
416end
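# Classic computed-goto dispatch: the next opcode byte indexes a table of
# handler addresses, the pinned pc/dispatch registers are refreshed via
# LiveOut, and IndirectJump transfers control to the handler without returning.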
417
418macro(:call_runtime) do |sym, *args|
419  Call(*args).Method(sym)
420end
421
422macro(:advance_pc_imm) do |pc, imm|
423  AddI(pc).Imm(imm).ptr
424end
425
426macro(:advance_pc_var) do |pc, var|
427  Add(pc, var).ptr
428end
429
430macro(:acc_receiver) do |op, imm|
431  If(imm, 0).CC(:CC_EQ).b {
432    res1 := acc_value.ref
433  } Else {
434    res2 := vreg_value(op).ref
435  }
436  Phi(res1, res2).ref
437end
438
439macro(:generic_call) do |id, size, is_initobj, receiver, nargs, copy_lambda|
440  caller := LoadI(%frame).Imm("Frame::GetMethodOffset()").ptr
441  callee := call_runtime("GetCalleeMethodFromBytecodeId", caller, u16toword(id)).ptr
442  if receiver
443    callee := call_runtime("ResolveVirtualMethod", callee, receiver).ptr
444  end
445  if is_initobj
446      # TODO: multiarray for initobj
447      klass := u32toword(LoadI(callee).Imm("Method::GetClassOffset()").u32)
448      # TODO(mbolshov): handle nullptr for returned obj
449      obj := call_runtime("CreateObjectByClassInterpreter", %tr, klass).ptr
450      set_acc_object(acc_ptr, obj)
451  end
452  If(call_runtime("HasCompiledCode", callee).i32, 0).CC(:CC_NE).b {
453    call_runtime("InterpreterToCompiledCodeBridge", %pc, %frame, callee, %tr).void
454    StoreI(%tr, %frame).Imm("ManagedThread::GetFrameOffset()").ptr
455    pc_native := advance_pc_imm(%pc, size)
456  } Else {
457    num_vregs := call_runtime("GetNumVregsByMethod", callee).word
458    num_vregs := AddI(num_vregs).Imm(1).word if is_initobj
459    if nargs
460      num_args := nargs
461    else
462      num_args := call_runtime("GetNumArgsByMethod", callee).word
463    end
464    frame_size := Add(num_vregs, num_args).word
465    actual_size := Add(frame_size, frame_size).word
466    # TODO(mbolshov): Fast path for frame allocation should be done in irtoc
467    new_frame := call_runtime("CreateFrameWithSize", actual_size, frame_size, callee, %frame).ptr
468    StoreI(new_frame, "Frame::IS_STACKLESS").Imm("Frame::GetFlagsOffset()").word
469    if is_initobj
470      obj_vreg_ptr := frame_vreg_ptr(new_frame, SubI(num_vregs).Imm(1).word)
471      set_tag_frame(new_frame, obj_vreg_ptr, 0x1)
472      set_value(obj_vreg_ptr, obj)
473    end
474    copy_lambda.call(new_frame, num_vregs, num_args)
475    StoreI(new_frame, %frame).Imm("Frame::GetPrevFrameOffset()").ptr
476    StoreI(%tr, new_frame).Imm("ManagedThread::GetFrameOffset()").ptr
477    StoreI(%frame, advance_pc_imm(%pc, size)).Imm("Frame::GetNextInstructionOffset()").ptr
478    pc_int := call_runtime("GetInstructionsByMethod", callee).ptr
479  }
480  frame := Phi(%frame, new_frame).ptr
481  pc := Phi(pc_native, pc_int).ptr
482end
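# Two outcomes of generic_call: if the callee has compiled code, the call goes
# through InterpreterToCompiledCodeBridge and execution resumes in the current
# frame right after the call instruction; otherwise a new stackless frame of
# num_vregs + num_args registers is allocated, the arguments are copied in by
# copy_lambda, the return pc is remembered in the caller frame and execution
# continues at the callee's first bytecode.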
483
484macro(:generic_return) do |copy_lambda|
485  If(LoadI(%frame).Imm("Frame::GetFlagsOffset()").word, "Frame::IS_STACKLESS").CC(:CC_EQ).b {
486    prev_frame := LoadI(%frame).Imm("Frame::GetPrevFrameOffset()").ptr
487    next_pc := LoadI(prev_frame).Imm("Frame::GetNextInstructionOffset()").ptr
488    copy_lambda.call(prev_frame)
489    StoreI(%tr, prev_frame).Imm("ManagedThread::GetFrameOffset()").ptr
490    call_runtime("FreeFrame", frame).void
491    frame := prev_frame
492    pc := next_pc
493  } Else {
494    Intrinsic(:INTERPRETER_RETURN).ptr.Terminator
495  }
496end
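# generic_return mirrors generic_call: a stackless frame is unwound inside the
# loop (restore the previous frame, pick up its saved next-instruction pc and
# free the finished frame), while any other frame leaves the interpreter
# through the INTERPRETER_RETURN intrinsic.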
497
498# Handlers:
499
500macro(:handle_movi) do |vd, imm|
501  set_primitive(vd, imm).i32
502end
503
504macro(:handle_movi_64) do |vd, imm|
505  set_primitive(vd, imm).i64
506end
507
508macro(:handle_mov) do |vd, vs|
509  # TODO(aantipina): add assert(!has_object(vs))
510  set_primitive(vd, vs).u32
511end
512
513macro(:handle_lda) do |vs|
514  set_acc_primitive(acc_ptr, vs).u32
515end
516
517macro(:handle_lda_str_id32) do |id|
518  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
519  string := call_runtime("ResolveStringEntrypoint", method_ptr, id).ptr
520  set_acc_object(acc_ptr, string).ref
521end
522
523macro(:handle_lda_type_id16) do |id|
524  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
525  # TODO: fix
526  type := call_runtime("ResolveStringEntrypoint", method_ptr, u16tou32(id)).ptr
527  set_acc_object(acc_ptr, type).ref
528end
529
530macro(:handle_lda_const_v8_id32) do |v, id|
531  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
532  cnst := call_runtime("ResolveLiteralArrayEntrypoint", method_ptr, id).ptr
533  set_object(v, cnst).ref
534end
535
536macro(:handle_ldai_imm) do |imm|
537  set_acc_primitive(acc_ptr, imm).i32
538end
539
540macro(:handle_ldai_64_imm) do |imm|
541  set_acc_primitive(acc_ptr, imm).i64
542end
543
544macro(:handle_fldai_imm) do |imm|
545  set_acc_primitive(acc_ptr, imm).f32
546end
547
548macro(:handle_fldai_64_imm) do |imm|
549  set_acc_primitive(acc_ptr, imm).f64
550end
551
552macro(:handle_add_v4_v4) do |vs1, vs2|
553  add := Add(vs1, vs2).i32
554  set_value(acc_ptr, add).i32
555end
556
557macro(:handle_fadd2_v8) do |vs|
558  add := Add(acc_value.f32, vs).f32
559  set_value(acc_ptr, add).f32
560end
561
562macro(:handle_sta_v8) do |vd|
563  set_primitive(vd, acc_value.u32).u32
564end
565
566macro(:handle_sta_64_v8) do |vd|
567  set_primitive(vd, acc_value.u64).u64
568end
569
570macro(:handle_jmp_imm) do |pc, imm|
571  advance_pc_var(pc, i32tou64(imm))
572end
573
574macro(:handle_jmp_imm32) do |pc, imm32|
575  advance_pc_var(pc, i32tou64(imm32))
576end
577
578macro(:handle_inci_v4_imm4) do |v, imm|
579  val := get_value(v)
580  add := Add(val.i32, imm).i32
581  set_value(v, add).i32
582end
583
584macro(:handle_cmp) do |acc, vs|
585  If(acc, vs).CC(:CC_LT).b {
586    set_value(acc_ptr, -1).i32
587  } Else {
588    If(acc, vs).CC(:CC_EQ).b {
589      set_value(acc_ptr, 0).i32
590    } Else {
591      set_value(acc_ptr, 1).i32
592    }
593  }
594end
595
596['Add', 'Sub', 'And', 'Mul', 'Or', 'Xor', 'Shl', 'Shr', 'AShr'].each do |op|
597  # v4 v4
598  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
599    v_ := send(op, vs1, vs2).i32
600    set_value(acc_ptr, v_).i32
601  end
602  # v8
603  macro(:"handle_#{op.downcase}2_v8") do |vs|
604    v_ := send(op, acc_value.i32, vs).i32
605    set_value(acc_ptr, v_).i32
606  end
607  # 64_v8
608  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
609    v_ := send(op, acc_value.i64, vs).i64
610    set_value(acc_ptr, v_).i64
611  end
612  # imm
613  macro(:"handle_#{op.downcase}i_imm") do |imm|
614    v_ := send(op, acc_value.i32, imm).i32
615    set_value(acc_ptr, v_).i32
616  end
617end
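# Each binary opcode above gets four handler flavours, e.g. for Add:
#   handle_add_v4_v4, handle_add2_v8, handle_add2_64_v8 and handle_addi_imm.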
618
619# Unary
620['Not', 'Neg'].each do |op|
621  macro(:"handle_#{op.downcase}") do
622    v_ := send(op, acc_value.i32).i32
623    set_value(acc_ptr, v_).i32
624  end
625  macro(:"handle_#{op.downcase}_64") do
626    v_ := send(op, acc_value.i64).i64
627    set_value(acc_ptr, v_).i64
628  end
629end
630
631['Div', 'Mod'].each do |op|
632  macro(:"handle_#{op.downcase}_v4_v4") do |vs1, vs2|
633    # TODO: exception if vs2 is 0
634    v_ := send(op, vs1, vs2).i32
635    set_value(acc_ptr, v_).i32
636  end
637  macro(:"handle_#{op.downcase}2_v8") do |vs|
638    # TODO: exception if vs is 0
639    v_ := send(op, acc_value.i32, vs).i32
640    set_value(acc_ptr, v_).i32
641  end
642  macro(:"handle_#{op.downcase}2_64_v8") do |vs|
643    # TODO: exception if vs is 0
644    v_ := send(op, acc_value.i64, vs).i64
645    set_value(acc_ptr, v_).i64
646  end
647  macro(:"handle_#{op.downcase}u2_v8") do |vs|
648    # TODO: exception if vs is 0
649    v_ := send(op, acc_value.u32, vs).u32
650    set_value(acc_ptr, v_).u32
651  end
652  macro(:"handle_#{op.downcase}u2_64_v8") do |vs|
653    # TODO: exception if vs is 0
654    v_ := send(op, acc_value.u64, vs).u64
655    set_value(acc_ptr, v_).u64
656  end
657  macro(:"handle_#{op.downcase}i_imm") do |imm|
658    # TODO: exception if imm is 0
659    v_ := send(op, acc_value.i32, imm).i32
660    set_value(acc_ptr, v_).i32
661  end
662end
663
664macro(:handle_newarr_v4_v4_id16) do |vd, vs, id|
665  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
666  array := call_runtime("CreateArrayByIdEntrypoint", method_ptr, u16tou32(id), vs.word).ptr
667  set_object(vd, array).ref
668end
669
670macro(:handle_lenarr_v8) do |vs|
671  ss := SaveState()
672  # TODO(aantipina): add assert(has_object(vs))
673  null_check := NullCheck(vs.ref, ss).ref
674  len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
675  set_acc_primitive(acc_ptr, len_array).i32
676end
677
678[['ldarr', :i32], ['ldarr_64', :i64], ['fldarr_64', :f64], ['fldarr_32', :f32]].each do |name, type|
679  macro(:"handle_#{name}_v8") do |vs|
680    ss := SaveState()
681    # TODO(aantipina): add assert(has_object(vs))
682    null_check := NullCheck(vs.ref, ss).ref
683    len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
684    bounds_check := BoundsCheck(len_array, acc_value.i32, ss).i32
685    load_array := LoadArray(null_check, bounds_check).send(type)
686    set_value(acc_ptr, load_array).send(type)
687  end
688end
689
690[8, 16].each do |size|
691  macro(:"handle_ldarr_#{size}_v8") do |vs|
692    ss := SaveState()
693    # TODO(aantipina): add assert(has_object(vs))
694    null_check := NullCheck(vs.ref, ss).ref
695    len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
696    bounds_check := BoundsCheck(len_array, acc_value.i32, ss).i32
697    load_array := LoadArray(null_check, bounds_check).send(:"i#{size}")
698    set_value(acc_ptr, send(:"i#{size}toi32", load_array)).i32
699  end
700end
701
702[8, 16].each do |size|
703  macro(:"handle_ldarru_#{size}_v8") do |vs|
704    ss := SaveState()
705    # TODO(aantipina): add assert(has_object(vs))
706    null_check := NullCheck(vs.ref, ss).ref
707    len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
708    bounds_check := BoundsCheck(len_array, acc_value.i32, ss).i32
709    load_array := LoadArray(null_check, bounds_check).send(:"u#{size}")
710    set_value(acc_ptr, send(:"u#{size}tou32", load_array)).u32
711  end
712end
713
714macro(:handle_ldarr_obj_v8) do |vs|
715  ss := SaveState()
716  # TODO(aantipina): add assert(has_object(vs))
717  null_check := NullCheck(vs.ref, ss).ref
718  len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
719  bounds_check := BoundsCheck(len_array, acc_value.i32, ss).i32
720  load_array := LoadArray(null_check, bounds_check).ref
721  set_acc_object(acc_ptr, load_array).ref
722end
723
724[8, 16].each do |size|
725  macro(:"handle_starr_#{size}_v4_v4") do |vs1, vs2|
726    ss := SaveState()
727    # TODO(aantipina): add assert(has_object(vs1))
728    null_check := NullCheck(vs1.ref, ss).ref
729    len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
730    bounds_check := BoundsCheck(len_array, vs2, ss).i32
731    StoreArray(null_check, bounds_check, acc_value.i32).send(:"i#{size}")
732  end
733end
734
735[['starr', :i32], ['starr_64', :i64], ['starr_obj', :ref], ['fstarr_32', :f32], ['fstarr_64', :f64]].each do |name, type|
736  macro(:"handle_#{name}_v4_v4") do |vs1, vs2|
737    ss := SaveState()
738    # TODO(aantipina): add assert(has_object(vs1))
739    null_check := NullCheck(vs1.ref, ss).ref
740    len_array := LoadI(null_check).Imm(Constants::ARRAY_LENGTH_OFFSET).i32
741    bounds_check := BoundsCheck(len_array, vs2.i32, ss).i32
742    StoreArray(null_check, bounds_check, acc_value.send(type)).send(type)
743  end
744end
745
746macro(:handle_newobj_v8_id16) do |vd, id|
747  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
748  # TODO(mbolshov): handle returned nullptr
749  object := call_runtime("CreateObjectByIdEntrypoint", %tr, method_ptr, u16tou32(id)).ptr
750  set_object(vd, object).ref
751end
752
753[['', :u32], ['64_', :u64]].each do |name, type|
754  macro(:"handle_stobj_#{name}v8_id16") do |vs, id|
755    # TODO(aantipina): add assert(has_object(vs))
756    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
757    offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
758    Store(vs, offset, acc_value.send(type)).send(type)
759  end
760end
761
762macro(:handle_stobj_obj_v8_id16) do |vs, id|
763  # TODO(aantipina): add assert(has_object(vs))
764  # TODO(aantipina): add assert(has_object(acc))
765  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
766  offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
767  Store(vs, offset, acc_value.ref).SetNeedBarrier(true).ref
768end
769
770[['', :u32], ['64_', :u64]].each do |name, type|
771  macro(:"handle_stobj_v_#{name}v4_v4_id16") do |v1, v2, id|
772    # TODO(aantipina): add assert(has_object(vs))
773    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
774    offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
775    Store(v2.ref, offset, v1.send(type)).send(type)
776  end
777end
778
779macro(:handle_stobj_v_obj_v4_v4_id16) do |v1, v2, id|
780  # TODO(aantipina): add assert(has_object(vs))
781  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
782  offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
783  Store(v2.ref, offset, v1.ref).SetNeedBarrier(true).ref
784end
785
786[['', :u32], ['64_', :u64]].each do |name, type|
787  macro(:"handle_ldobj_#{name}v8_id16") do |vs, id|
788    # TODO(aantipina): add assert(has_object(vs))
789    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
790    offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
791    value := Load(vs, offset).send(type)
792    set_acc_primitive(acc_ptr, value).send(type)
793  end
794end
795
796macro(:handle_ldobj_obj_v8_id16) do |vs, id|
797  # TODO(aantipina): add assert(has_object(vs))
798  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
799  offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
800  value := Load(vs, offset).ref
801  set_acc_object(acc_ptr, value).ref
802end
803
804[['', :u32], ['64_', :u64]].each do |name, type|
805  macro(:"handle_ldobj_v_#{name}v4_v4_id16") do |vd, vs, id|
806    # TODO(aantipina): add assert(has_object(vs))
807    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
808    offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
809    value := Load(vs, offset).send(type)
810    set_primitive(vd, value).send(type)
811  end
812end
813
814macro(:handle_ldobj_v_obj_v4_v4_id16) do |vd, vs, id|
815  # TODO(aantipina): add assert(has_object(vs))
816  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
817  offset := call_runtime("GetFieldOffsetByIdEntrypoint", method_ptr, u16tou32(id)).word
818  value := Load(vs, offset).ref
819  set_object(vd, value).ref
820end
821
822[['', :u32], ['64_', :u64]].each do |name, type|
823  macro(:"handle_ststatic_#{name}id16") do |id|
824    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
825    addr := call_runtime("GetStaticFieldAddressByIdEntrypoint", %tr, method_ptr, u16tou32(id)).ptr
826    # TODO(aantipina): add assert(is_static(field))
827    StoreI(addr, acc_value.send(type)).Imm(0).send(type)
828  end
829end
830
831macro(:handle_ststatic_obj_id16) do |id|
832  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
833  addr := call_runtime("GetStaticFieldAddressByIdEntrypoint", %tr, method_ptr, u16tou32(id)).ptr
834  # TODO(aantipina): add assert(is_static(field))
835  StoreI(addr, acc_value.ref).Imm(0).SetNeedBarrier(true).ref
836end
837
838[['', :u32], ['64_', :u64]].each do |name, type|
839  macro(:"handle_ldstatic_#{name}id16") do |id|
840    method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
841    addr := call_runtime("GetStaticFieldAddressByIdEntrypoint", %tr, method_ptr, u16tou32(id)).ptr
842    # TODO(aantipina): add assert(is_static(field))
843    value := LoadI(addr).Imm(0).send(type)
844    set_acc_primitive(acc_ptr, value).send(type)
845  end
846end
847
848macro(:handle_ldstatic_obj_id16) do |id|
849  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
850  addr := call_runtime("GetStaticFieldAddressByIdEntrypoint", %tr, method_ptr, u16tou32(id)).ptr
851  # TODO(aantipina): add assert(is_static(field))
852  value := LoadI(addr).Imm(0).ref
853  set_acc_object(acc_ptr, value).ref
854end
855
856macro(:handle_isinstance_id16) do |id|
857  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
858  # TODO(aantipina): add assert(has_object(acc))
859  res := call_runtime("IsInstanceByBCIDEntrypoint", method_ptr, acc_value.u64, u16tou32(id)).u8
860  set_acc_primitive(acc_ptr, u8tou32(res)).u32
861end
862
863macro(:handle_checkcast_id16) do |id|
864  method_ptr := LoadI(%frame).Imm(Constants::FRAME_METHOD_OFFSET).ptr
865  # TODO(aantipina): add assert(has_object(acc))
866  call_runtime("CheckCastByBCIDEntrypoint", method_ptr, acc_value.u64, u16tou32(id)).void
867end
868
869macro(:handle_sta_obj_v8) do |vd|
870  # TODO(aantipina): add assert(has_object(acc))
871  set_object(vd, acc_value.ref).ref
872end
873
874macro(:handle_lda_obj_v8) do |vs|
875  # TODO(aantipina): add assert(has_object(acc))
876  set_acc_object(acc_ptr, vs).ref
877end
878
879macro(:handle_mov_null_v8) do |vd|
880  set_object(vd, 0).ref
881end
882
883macro(:handle_lda_null) do
884  set_acc_object(acc_ptr, 0).ref
885end
886
887['eq', 'ne', 'lt', 'gt', 'le', 'ge'].each do |cc|
888  ['8', '16'].each do |from|
889    macro(:"handle_j#{cc}_v8_imm#{from}") do |pc, vs, imm, size|
890      acc := acc_value.i32
891      If(acc, vs).CC(:"cc_#{cc}".upcase).b {
892        to_imm = as_imm(imm)
893        imm_casted = Cast(to_imm).SrcType("DataType::INT#{from}").i32
894        pc1 := advance_pc_var(pc, i32tou64(imm_casted))
895      } Else {
896        pc2 := advance_pc_imm(pc, size)
897      }
898      Phi(pc1, pc2).ptr
899    end
900  end
901end
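# In all the conditional-jump handlers the offset is decoded and sign-extended
# only on the taken branch (see the "NB!" note in the main loop below); the
# handler's result is a Phi of the taken-branch pc and the fall-through pc.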
902
903['ne', 'eq', 'lt', 'gt', 'le', 'ge'].each do |cc|
904  ['8', '16'].each do |from|
905    macro(:"handle_j#{cc}z_imm#{from}") do |pc, imm, size|
906      If(acc_value.i32, 0).CC(:"cc_#{cc}".upcase).b {
907        to_imm = as_imm(imm)
908        imm_casted = Cast(to_imm).SrcType("DataType::INT#{from}").i32
909        pc1 := advance_pc_var(pc, i32tou64(imm_casted))
910      } Else {
911        pc2 := advance_pc_imm(pc, size)
912      }
913      Phi(pc1, pc2).ptr
914    end
915  end
916end
917
918# TODO: fix fcmp for nans
919macro(:"handle_fcmpg_v8") do |vs|
920  v_ := Cmp(acc_value.f32, vs).SrcType("DataType::FLOAT32").Fcmpg(true).i32
921  set_acc_primitive(acc_ptr, v_).i32
922end
923
924macro(:"handle_fcmpg_64_v8") do |vs|
925  v_ := Cmp(acc_value.f64, vs).SrcType("DataType::FLOAT64").Fcmpg(true).i32
926  set_acc_primitive(acc_ptr, v_).i32
927end
928
929macro(:"handle_fcmpl_v8") do |vs|
930  v_ := Cmp(acc_value.f32, vs).i32
931  set_acc_primitive(acc_ptr, v_).i32
932end
933
934macro(:"handle_fcmpl_64_v8") do |vs|
935  v_ := Cmp(acc_value.f64, vs).i32
936  set_acc_primitive(acc_ptr, v_).i32
937end
938
939['ne', 'eq'].each do |cc|
940  ['8', '16'].each do |from|
941    macro(:"handle_j#{cc}_obj_v8_imm#{from}") do |pc, vs, imm, size|
942      # TODO(aantipina): add assert(has_object(acc))
943      # TODO(aantipina): add assert(has_object(vs))
944      If(vs, acc_value.ref).CC(:"cc_#{cc}".upcase).b {
945        to_imm = as_imm(imm)
946        imm_casted = Cast(to_imm).SrcType("DataType::INT#{from}").i32
947        pc1 := advance_pc_var(pc, i32tou64(imm_casted))
948      } Else {
949        pc2 := advance_pc_imm(pc, size)
950      }
951      Phi(pc1, pc2).ptr
952    end
953  end
954end
955
956['ne', 'eq'].each do |cc|
957  ['8', '16'].each do |from|
958    macro(:"handle_j#{cc}z_obj_imm#{from}") do |pc, imm, size|
959      If(acc_value.ref, 0).CC(:"cc_#{cc}".upcase).b {
960        to_imm = as_imm(imm)
961        imm_casted = Cast(to_imm).SrcType("DataType::INT#{from}").i32
962        pc1 := advance_pc_var(pc, i32tou64(imm_casted))
963      } Else {
964        pc2 := advance_pc_imm(pc, size)
965      }
966      Phi(pc1, pc2).ptr
967    end
968  end
969end
970
971# Conversions from integer types to u1
972
973['i32', 'i64', 'u32', 'u64'].each do |from|
974  macro(:"handle_#{from}tou1") do
975    value := acc_value.send(:"#{from}")
976    set_value(acc_ptr, 0).send(:"#{from}")
977    If(value, 0).CC(:CC_NE).b {
978      set_value(acc_ptr, 1).u32
979    }
980  end
981end
982
983# Integer truncations and extensions
984
985['i32', 'u32'].each do |from|
986  macro(:"handle_#{from}toi64") do
987    set_value(acc_ptr, send(:"#{from}toi64", acc_value.send(:"#{from}"))).i64
988  end
989end
990
991['i32', 'u32'].each do |from|
992  ['i16', 'u16', 'i8', 'u8'].each do |to|
993    macro(:"handle_#{from}to#{to}") do
994      value := send(:"#{from}to#{to}", acc_value.send(:"#{from}"))
995      to_expanded = to.gsub(/\d+/,"32")
996      value_expanded := send(:"#{to}to#{to_expanded}", value)
997      set_value(acc_ptr, value_expanded).send(:"#{to_expanded}")
998    end
999  end
1000end
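# The narrowing conversions above truncate the accumulator to i8/i16/u8/u16 and
# immediately widen the result back to 32 bits before storing it.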
1001
1004macro(:handle_i64toi32) do
1005  set_value(acc_ptr, i64toi32(acc_value.i64)).i32
1006end
1007
1008['i32', 'u32'].each do |to|
1009  macro(:"handle_u64to#{to}") do
1010    set_value(acc_ptr, send(:"u64to#{to}", acc_value.u64)).send(:"#{to}")
1011  end
1012end
1013
1014# Conversions between integer and floating point types
1015
1016['i32', 'u32', 'i64', 'u64'].each do |from|
1017  ['f32', 'f64'].each do |to|
1018    macro(:"handle_#{from}to#{to}") do
1019      set_value(acc_ptr, send(:"#{from}to#{to}", acc_value.send(:"#{from}"))).send("#{to}")
1020    end
1021  end
1022end
1023
1024['f64', 'i32', 'i64', 'u32', 'u64'].each do |to|
1025  macro(:"handle_f32to#{to}") do
1026    set_value(acc_ptr, send(:"f32to#{to}", acc_value.f32)).send(:"#{to}")
1027  end
1028end
1029
1030['i32', 'i64', 'u32', 'u64', 'f32'].each do |to|
1031  macro(:"handle_f64to#{to}") do
1032    set_value(acc_ptr, send("f64to#{to}", acc_value.f64)).send("#{to}")
1033  end
1034end
1035
1036macro(:handle_mov_64) do |vd, vs|
1037  set_primitive(vd, vs).u64
1038end
1039
1040macro(:handle_mov_obj) do |vd, vs|
1041  # TODO(mgonopolskiy): add assert(has_object(vs))
1042  set_object(vd, vs).ref
1043end
1044
1045macro(:handle_lda_64) do |vs|
1046  set_acc_primitive(acc_ptr, vs).u64
1047end
1048
1053macro(:handle_i32tof64) do
1054  set_value(acc_ptr, i32tof64(acc_value.i32)).f64
1055end
1056
1057macro(:handle_fmovi_v8_imm) do |vd, imm|
1058  set_primitive(vd, imm).f32
1059end
1060
1061macro(:handle_fmovi_64_v8_imm) do |vd, imm|
1062  set_primitive(vd, imm).f64
1063end
1064
1065macro(:handle_fadd2_64_v8) do |vs|
1066  v_ := Add(acc_value.f64, vs).f64
1067  set_value(acc_ptr, v_).f64
1068end
1069
1070macro(:handle_fsub2_v8) do |vs|
1071  v_ := Sub(acc_value.f32, vs).f32
1072  set_value(acc_ptr, v_).f32
1073end
1074
1075macro(:handle_fsub2_64_v8) do |vs|
1076  v_ := Sub(acc_value.f64, vs).f64
1077  set_value(acc_ptr, v_).f64
1078end
1079
1080macro(:handle_fmul2_v8) do |vs|
1081  v_ := Mul(acc_value.f32, vs).f32
1082  set_value(acc_ptr, v_).f32
1083end
1084
1085macro(:handle_fmul2_64_v8) do |vs|
1086  v_ := Mul(acc_value.f64, vs).f64
1087  set_value(acc_ptr, v_).f64
1088end
1089
1090macro(:handle_fmod2_v8) do |vs|
1091  v_ := call_runtime("fmodf", acc_value.f32, vs).f32
1092  set_value(acc_ptr, v_).f32
1093end
1094
1095macro(:handle_fmod2_64_v8) do |vs|
1096  v_ := call_runtime("fmod", acc_value.f64, vs).f64
1097  set_value(acc_ptr, v_).f64
1098end
1099
1100macro(:handle_fdiv2_v8) do |vs|
1101  v_ := Div(acc_value.f32, vs).f32
1102  set_value(acc_ptr, v_).f32
1103end
1104
1105macro(:handle_fdiv2_64_v8) do |vs|
1106  v_ := Div(acc_value.f64, vs).f64
1107  set_value(acc_ptr, v_).f64
1108end
1109
1110[['', :f32], ['_64', :f64]].each do |name, type|
1111  macro(:"handle_fneg#{name}") do
1112    v_ := Neg(acc_value.send(type)).send(type)
1113    set_value(acc_ptr, v_).send(type)
1114  end
1115end
1116
1117['initobj', 'call', 'call_virt'].each do |op|
1118  macro(:"handle_#{op}_short_v4_v4_id16") do |v1, v2, id, size|
1119    receiver = vreg_value(v1).ref if op.include?('virt')
1120    generic_call(id, size, op == 'initobj', receiver, 2, lambda do |new_frame, num_vregs, _|
1121      copy_reg(new_frame, num_vregs, v1)
1122      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2)
1123    end)
1124  end
1125end
1126
1127['call', 'call_virt'].each do |op|
1128  macro(:"handle_#{op}_acc_short_v4_imm4_id16") do |v, imm, id, size|
1129    receiver = acc_receiver(v, imm).ref if op.include?('virt')
1130    generic_call(id, size, false, receiver, 2, lambda do |new_frame, num_vregs, _|
1131      If(imm, 0).CC(:CC_EQ).b {
1132        copy_acc(frame_vreg_ptr(new_frame, num_vregs), acc_ptr)
1133        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v)
1134      } Else {
1135        copy_reg(new_frame, num_vregs, v)
1136        copy_acc(frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), acc_ptr)
1137      }
1138    end)
1139  end
1140end
1141
1142['call', 'call_virt'].each do |op|
1143  macro(:"handle_#{op}_acc_v4_v4_v4_imm4_id16") do |v1, v2, v3, imm, id, size|
1144    receiver = acc_receiver(v1, imm).ref if op.include?('virt')
1145    generic_call(id, size, false, receiver, 4, lambda do |new_frame, num_vregs, _|
1146      If(imm, 0).CC(:CC_EQ).b {
1147        copy_acc(frame_vreg_ptr(new_frame, num_vregs), acc_ptr)
1148        copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v1)
1149        copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2)
1150        copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3)
1151      } Else {
1152        If(imm, 1).CC(:CC_EQ).b {
1153          copy_reg(new_frame, num_vregs, v1)
1154          copy_acc(frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(1).word), acc_ptr)
1155          copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v2)
1156          copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3)
1157        } Else {
1158          If(imm, 2).CC(:CC_EQ).b {
1159            copy_reg(new_frame, num_vregs, v1)
1160            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2)
1161            copy_acc(frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(2).word), acc_ptr)
1162            copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v3)
1163          } Else {
1164            # TODO(mbolshov): assert imm==3
1165            copy_reg(new_frame, num_vregs, v1)
1166            copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2)
1167            copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3)
1168            copy_acc(frame_vreg_ptr(new_frame, AddI(num_vregs).Imm(3).word), acc_ptr)
1169          }
1170        }
1171      }
1172    end)
1173  end
1174end
1175
1176['initobj', 'call', 'call_virt'].each do |op|
1177  macro(:"handle_#{op}_v4_v4_v4_v4_id16") do |v1, v2, v3, v4, id, size|
1178    receiver = vreg_value(v1).ref if op.include?('virt')
1179    generic_call(id, size, op == 'initobj', receiver, 4, lambda do |new_frame, num_vregs, _|
1180      copy_reg(new_frame, num_vregs, v1)
1181      copy_reg(new_frame, AddI(num_vregs).Imm(1).word, v2)
1182      copy_reg(new_frame, AddI(num_vregs).Imm(2).word, v3)
1183      copy_reg(new_frame, AddI(num_vregs).Imm(3).word, v4)
1184    end)
1185  end
1186end
1187
1188['initobj', 'call', 'call_virt'].each do |op|
1189  macro(:"handle_#{op}_range_v8_id16") do |v, id, size|
1190    receiver = vreg_value(v).ref if op.include?('virt')
1191    generic_call(id, size, op == 'initobj', receiver, nil, lambda do |new_frame, num_vregs, num_args|
1192      dst_ptr_0 := frame_vreg_ptr(new_frame, num_vregs)
1193      src_ptr_0 := vreg_ptr(v)
1194      i0 := 0
1195      Label(:Head)  # TODO(mbolshov): use While loops when they are ready
1196      i := Phi(i0, i1).word
1197      If(i, num_args).CC(:CC_EQ) do
1198        Goto(:Exit)
1199      end
1200      offset := Mul(i, Constants::VREGISTER_SIZE).word
1201      dst_ptr := Add(dst_ptr_0, offset).ptr
1202      src_ptr := Add(src_ptr_0, offset).ptr
1203      set_value(dst_ptr, get_value(src_ptr))
1204      set_tag_frame(new_frame, dst_ptr, get_tag(src_ptr))
1205      i1 := Add(i, 1).word
1206      Goto(:Head)
1207      Label(:Exit)
1208    end)
1209  end
1210end
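# The range calls copy the arguments with an explicit Label/Goto loop over
# value/tag pairs, one VREGISTER_SIZE stride per iteration, until num_args
# registers have been transferred (structured While loops are not available
# yet, per the TODO above).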
1211
1212[:handle_return, :handle_return_64, :handle_return_obj].each do |handler|
1213  macro(handler) do
1214    generic_return(lambda { |prev_frame| copy_acc(acc_ptr_frame(prev_frame), acc_ptr) })
1215  end
1216end
1217
1218macro(:handle_return_void) do
1219  generic_return(lambda { |_| })
1220end
1221
1222['enter', 'exit'].each do |op|
1223  macro(:"handle_monitor#{op}") do
1224    # TODO(mbolshov): ref to c-pointer cast
1225    call_runtime("ObjectMonitor#{op.capitalize}", u32toword(acc_value.u32)).void
1226  end
1227end
1228
1229include_plugin 'interpreter_handlers'
1230
1231# Functions:
1232
1233function(:ExecuteImplFast,
1234         params: { 'tr' => 'ptr', 'pc' => 'ptr', 'frame' => 'ptr', 'dispatch_table' => 'ptr' },
1235         regmap: handler_regmap,
1236         regalloc_set: $panda_mask,
1237         mode: [:InterpreterEntry],
1238         validate: InterpreterValidation) do
1239  # Arm32 is not supported
1240  if Options.arch == :arm32
1241    Intrinsic(:UNREACHABLE).void
1242    next
1243  end
1244
1245  # Setup registers according to internal interpreter calling convention:
1246  LiveOut(tr).DstReg(regmap[:tr]).ptr
1247  LiveOut(frame).DstReg(regmap[:frame]).ptr
1248
1249  dispatch(dispatch_table, pc)
1250  Intrinsic(:UNREACHABLE).void if defines.DEBUG
1251  ReturnVoid()
1252end
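# ExecuteImplFast only seeds the pinned tr/frame registers and dispatches the
# first opcode; every generated handler below re-dispatches at its end, so
# control is not expected to return here.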
1253
1254Panda.instructions.each do |i|
1255  op = i.operands # alias for brevity
1256  mode = [:Interpreter]
1257  mode.push(:DynamicMethod) if (i.namespace == "ecmascript" || i.properties.include?("dynamic"))
  lang = i.namespace == "core" ? "PANDA_ASSEMBLY" : i.namespace.upcase
1259
1260  function("HANDLE_FAST_#{i.handler_name}",
1261           regmap: handler_regmap,
1262           regalloc_set: $panda_mask,
1263           mode: mode,
1264           lang: lang,
1265           validate: InterpreterValidation) do
1266    # Arm32 is not supported
1267    if Options.arch == :arm32
1268      Intrinsic(:UNREACHABLE).void
1269      next
1270    end
1271
1272    call_runtime("DebugPrintEntrypoint", %frame, %pc).void if defines.DEBUG
1273
1274    pc := %pc
1275    table := %dispatch
1276    frame := %frame
1277    tr := %tr
1278
1279    case i.handler_name
1280    when "NOP"
1281    # mov
1282    when "MOVI_V4_IMM4", "MOVI_V8_IMM8"
1283      handle_movi(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
1284    when "MOVI_V8_IMM16"
1285      handle_movi(vreg_ptr(op[0]), i16toi32(as_imm(op[1])))
1286    when "MOVI_V8_IMM32"
1287      handle_movi(vreg_ptr(op[0]), as_imm(op[1]))
1288    when "MOVI_64_V8_IMM64"
1289      handle_movi_64(vreg_ptr(op[0]), as_imm(op[1]))
1290    when "MOV_V4_V4", "MOV_V8_V8", "MOV_V16_V16"
1291      handle_mov(vreg_ptr(op[0]), vreg_value(op[1]).u32)
1292    when "MOV_64_V4_V4", "MOV_64_V16_V16"
1293      handle_mov_64(vreg_ptr(op[0]), vreg_value(op[1]).u64)
1294    when "MOV_OBJ_V4_V4", "MOV_OBJ_V8_V8", "MOV_OBJ_V16_V16"
1295      handle_mov_obj(vreg_ptr(op[0]), vreg_value(op[1]).ref)
1296    when "MOV_NULL_V8"
1297      handle_mov_null_v8(vreg_ptr(op[0]))
1298    when "FMOVI_PREF_V8_IMM32"
1299      handle_fmovi_v8_imm(vreg_ptr(op[0]), as_imm(op[1]))
1300    when "FMOVI_64_V8_IMM64"
1301      handle_fmovi_64_v8_imm(vreg_ptr(op[0]).ptr, as_imm(op[1]).f64)
1302    # lda
1303    when "LDA_V8"
1304      handle_lda(vreg_value(op[0]).u32)
1305    when "LDA_64_V8"
1306      handle_lda_64(vreg_value(op[0]).u64)
1307    when "LDA_OBJ_V8"
1308      handle_lda_obj_v8(vreg_value(op[0]).ref)
1309    when "LDA_STR_ID32"
1310      handle_lda_str_id32(as_id(op[0]))
1311    when "LDA_TYPE_ID16"
1312      handle_lda_type_id16(as_id(op[0]))
1313    when "LDA_CONST_V8_ID32"
1314      handle_lda_const_v8_id32(vreg_ptr(op[0]), as_id(op[1]))
1315    when "LDAI_IMM8"
1316      handle_ldai_imm(i8toi32(as_imm(op[0])))
1317    when "LDAI_IMM16"
1318      handle_ldai_imm(i16toi32(as_imm(op[0])))
1319    when "LDAI_IMM32"
1320      handle_ldai_imm(as_imm(op[0]))
1321    when "LDAI_64_IMM64"
1322      handle_ldai_64_imm(as_imm(op[0]))
1323    when "FLDAI_PREF_IMM32"
1324      handle_fldai_imm(as_imm(op[0]))
1325    when "FLDAI_64_IMM64"
1326      handle_fldai_64_imm(as_imm(op[0]))
1327    when "LDA_NULL"
1328      handle_lda_null()
1329    when "LENARR_V8"
1330      handle_lenarr_v8(vreg_value(op[0]))
1331    when "LDARR_V8"
1332      handle_ldarr_v8(vreg_value(op[0]))
1333    when "LDARR_8_V8"
1334      handle_ldarr_8_v8(vreg_value(op[0]))
1335    when "LDARR_16_V8"
1336      handle_ldarr_16_v8(vreg_value(op[0]))
1337    when "LDARRU_8_V8"
1338      handle_ldarru_8_v8(vreg_value(op[0]))
1339    when "LDARRU_16_V8"
1340      handle_ldarru_16_v8(vreg_value(op[0]))
1341    when "LDARR_64_V8"
1342      handle_ldarr_64_v8(vreg_value(op[0]))
1343    when "FLDARR_32_V8"
1344      handle_fldarr_32_v8(vreg_value(op[0]))
1345    when "FLDARR_64_V8"
1346      handle_fldarr_64_v8(vreg_value(op[0]))
1347    when "LDARR_OBJ_V8"
1348      handle_ldarr_obj_v8(vreg_value(op[0]))
1349    when "LDOBJ_V8_ID16"
1350      handle_ldobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1351    when "LDOBJ_V_V4_V4_ID16"
1352      handle_ldobj_v_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
1353    when "LDOBJ_64_V8_ID16"
1354      handle_ldobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1355    when "LDOBJ_V_64_V4_V4_ID16"
1356      handle_ldobj_v_64_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
1357    when "LDOBJ_OBJ_V8_ID16"
1358      handle_ldobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1359    when "LDOBJ_V_OBJ_V4_V4_ID16"
1360      handle_ldobj_v_obj_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]).ref, as_id(op[2]))
1361    when "LDSTATIC_ID16"
1362      handle_ldstatic_id16(as_id(op[0]))
1363    when "LDSTATIC_64_ID16"
1364      handle_ldstatic_64_id16(as_id(op[0]))
1365    when "LDSTATIC_OBJ_ID16"
1366      handle_ldstatic_obj_id16(as_id(op[0]))
1367    # sta
1368    when "STA_V8"
1369      handle_sta_v8(vreg_ptr(op[0]))
1370    when "STA_64_V8"
1371      handle_sta_64_v8(vreg_ptr(op[0]))
1372    when "STA_OBJ_V8"
1373      handle_sta_obj_v8(vreg_ptr(op[0]))
1374    when "STARR_V4_V4"
1375      handle_starr_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1376    when "STARR_8_V4_V4"
1377      handle_starr_8_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1378    when "STARR_16_V4_V4"
1379      handle_starr_16_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1380    when "STARR_64_V4_V4"
1381      handle_starr_64_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1382    when "FSTARR_32_V4_V4"
1383      handle_fstarr_32_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1384    when "FSTARR_64_V4_V4"
1385      handle_fstarr_64_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1386    when "STARR_OBJ_V4_V4"
1387      handle_starr_obj_v4_v4(vreg_value(op[0]), vreg_value(op[1]).i32)
1388    when "STOBJ_V8_ID16"
1389      handle_stobj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1390    when "STOBJ_64_V8_ID16"
1391      handle_stobj_64_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1392    when "STOBJ_OBJ_V8_ID16"
1393      handle_stobj_obj_v8_id16(vreg_value(op[0]).ref, as_id(op[1]))
1394    when "STOBJ_V_V4_V4_ID16"
1395      handle_stobj_v_v4_v4_id16(vreg_value(op[0]), vreg_value(op[1]), as_id(op[2]))
1396    when "STOBJ_V_64_V4_V4_ID16"
1397      handle_stobj_v_64_v4_v4_id16(vreg_value(op[0]), vreg_value(op[1]), as_id(op[2]))
1398    when "STOBJ_V_OBJ_V4_V4_ID16"
1399      handle_stobj_v_obj_v4_v4_id16(vreg_value(op[0]), vreg_value(op[1]), as_id(op[2]))
1400    when "STSTATIC_ID16"
1401      handle_ststatic_id16(as_id(op[0]))
1402    when "STSTATIC_64_ID16"
1403      handle_ststatic_64_id16(as_id(op[0]))
1404    when "STSTATIC_OBJ_ID16"
1405      handle_ststatic_obj_id16(as_id(op[0]))
1406    # jmp
1407    when "JMP_IMM8"
1408      pc := handle_jmp_imm(pc, i8toi32(as_imm(op[0])))
1409    when "JMP_IMM16"
1410      pc := handle_jmp_imm(pc, i16toi32(as_imm(op[0])))
1411    when "JMP_IMM32"
1412      pc := handle_jmp_imm(pc, as_imm(op[0]))
1413    # conditional jumps
1414    # NB! Better not to load jump offset when condition is false
1415    when "JEQ_V8_IMM8"
1416      pc := handle_jeq_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1417    when "JEQ_V8_IMM16"
1418      pc := handle_jeq_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1419    when "JNE_V8_IMM8"
1420      pc := handle_jne_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1421    when "JNE_V8_IMM16"
1422      pc := handle_jne_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1423    when "JLT_V8_IMM8"
1424      pc := handle_jlt_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1425    when "JLT_V8_IMM16"
1426      pc := handle_jlt_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1427    when "JGT_V8_IMM8"
1428      pc := handle_jgt_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1429    when "JGT_V8_IMM16"
1430      pc := handle_jgt_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1431    when "JLE_V8_IMM8"
1432      pc := handle_jle_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1433    when "JLE_V8_IMM16"
1434      pc := handle_jle_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1435    when "JGE_V8_IMM8"
1436      pc := handle_jge_v8_imm8(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1437    when "JGE_V8_IMM16"
1438      pc := handle_jge_v8_imm16(pc, vreg_value(op[0]).i32, op[1], i.format.size)
1439    when "JEQZ_IMM8"
1440      pc := handle_jeqz_imm8(pc, op[0], i.format.size)
1441    when "JEQZ_IMM16"
1442      pc := handle_jeqz_imm16(pc, op[0], i.format.size)
1443    when "JNEZ_IMM8"
1444      pc := handle_jnez_imm8(pc, op[0], i.format.size)
1445    when "JNEZ_IMM16"
1446      pc := handle_jnez_imm16(pc, op[0], i.format.size)
1447    when "JLTZ_IMM8"
1448      pc := handle_jltz_imm8(pc, op[0], i.format.size)
1449    when "JLTZ_IMM16"
1450      pc := handle_jltz_imm16(pc, op[0], i.format.size)
1451    when "JGTZ_IMM8"
1452      pc := handle_jgtz_imm8(pc, op[0], i.format.size)
1453    when "JGTZ_IMM16"
1454      pc := handle_jgtz_imm16(pc, op[0], i.format.size)
1455    when "JLEZ_IMM8"
1456      pc := handle_jlez_imm8(pc, op[0], i.format.size)
1457    when "JLEZ_IMM16"
1458      pc := handle_jlez_imm16(pc, op[0], i.format.size)
1459    when "JGEZ_IMM8"
1460      pc := handle_jgez_imm8(pc, op[0], i.format.size)
1461    when "JGEZ_IMM16"
1462      pc := handle_jgez_imm16(pc, op[0], i.format.size)
1463    when "JNEZ_OBJ_IMM8"
1464      pc := handle_jnez_obj_imm8(pc, op[0], i.format.size)
1465    when "JNEZ_OBJ_IMM16"
1466      pc := handle_jnez_obj_imm16(pc, op[0], i.format.size)
1467    when "JEQZ_OBJ_IMM8"
1468      pc := handle_jeqz_obj_imm8(pc, op[0], i.format.size)
1469    when "JEQZ_OBJ_IMM16"
1470      pc := handle_jeqz_obj_imm16(pc, op[0], i.format.size)
1471    when "JNE_OBJ_V8_IMM8"
1472      pc := handle_jne_obj_v8_imm8(pc, vreg_value(op[0]).ref, op[1], i.format.size)
1473    when "JNE_OBJ_V8_IMM16"
1474      pc := handle_jne_obj_v8_imm16(pc, vreg_value(op[0]).ref, op[1], i.format.size)
1475    when "JEQ_OBJ_V8_IMM8"
1476      pc := handle_jeq_obj_v8_imm8(pc, vreg_value(op[0]).ref, op[1], i.format.size)
1477    when "JEQ_OBJ_V8_IMM16"
1478      pc := handle_jeq_obj_v8_imm16(pc, vreg_value(op[0]).ref, op[1], i.format.size)
1479    # cmp
1480    when "FCMPG_PREF_V8"
1481      handle_fcmpg_v8(vreg_value(op[0]).f32)
1482    when "FCMPG_64_V8"
1483      handle_fcmpg_64_v8(vreg_value(op[0]).f64)
1484    when "FCMPL_PREF_V8"
1485      handle_fcmpl_v8(vreg_value(op[0]).f32)
1486    when "FCMPL_64_V8"
1487      handle_fcmpl_64_v8(vreg_value(op[0]).f64)
1488    when "UCMP_PREF_V8"
1489      handle_cmp(acc_value.u32, vreg_value(op[0]).u32)
1490    when "UCMP_64_PREF_V8"
1491      handle_cmp(acc_value.u64, vreg_value(op[0]).u64)
1492    when "CMP_64_V8"
1493      handle_cmp(acc_value.i64, vreg_value(op[0]).i64)
1494    # add
1495    when "ADD_V4_V4"
1496      handle_add_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1497    when "INCI_V4_IMM4"
1498      handle_inci_v4_imm4(vreg_ptr(op[0]), i8toi32(as_imm(op[1])))
1499    when "ADDI_IMM8"
1500      handle_addi_imm(i8toi32(as_imm(op[0])))
1501    when "ADD2_V8"
1502      handle_add2_v8(vreg_value(op[0]).i32)
1503    when "ADD2_64_V8"
1504      handle_add2_64_v8(vreg_value(op[0]).i64)
1505    when "FADD2_64_V8"
1506      handle_fadd2_64_v8(vreg_value(op[0]).f64)
1507    when "FADD2_PREF_V8"
1508      handle_fadd2_v8(vreg_value(op[0]).f32)
1509    # sub
1510    when "FSUB2_PREF_V8"
1511      handle_fsub2_v8(vreg_value(op[0]).f32)
1512    when "SUB_V4_V4"
1513      handle_sub_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1514    when "SUB2_V8"
1515      handle_sub2_v8(vreg_value(op[0]).i32)
1516    when "SUB2_64_V8"
1517      handle_sub2_64_v8(vreg_value(op[0]).i64)
1518    when "SUBI_IMM8"
1519      handle_subi_imm(i8toi32(as_imm(op[0])))
1520    when "FSUB2_64_V8"
1521      handle_fsub2_64_v8(vreg_value(op[0]).f64)
1526    # mul
1527    when "MUL_V4_V4"
1528      handle_mul_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1529    when "MUL2_V8"
1530      handle_mul2_v8(vreg_value(op[0]).i32)
1531    when "FMUL2_PREF_V8"
1532      handle_fmul2_v8(vreg_value(op[0]).f32)
1533    when "MUL2_64_V8"
1534      handle_mul2_64_v8(vreg_value(op[0]).i64)
1535    when "MULI_IMM8"
1536      handle_muli_imm(i8toi32(as_imm(op[0])))
1537    when "FMUL2_64_V8"
1538      handle_fmul2_64_v8(vreg_value(op[0]).f64)
1539    # div
1540    when "FDIV2_PREF_V8"
1541      handle_fdiv2_v8(vreg_value(op[0]).f32)
1542    when "FDIV2_64_V8"
1543      handle_fdiv2_64_v8(vreg_value(op[0]).f64)
1544    when "DIV_V4_V4"
1545      handle_div_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1546    when "DIV2_V8"
1547      handle_div2_v8(vreg_value(op[0]).i32)
1548    when "DIVI_IMM8"
1549      handle_divi_imm(i8toi32(as_imm(op[0])))
1550    when "DIV2_64_V8"
1551      handle_div2_64_v8(vreg_value(op[0]).i64)
1552    when "DIVU2_PREF_V8"
1553      handle_divu2_v8(vreg_value(op[0]).i32)
1554    when "DIVU2_64_PREF_V8"
1555      handle_divu2_64_v8(vreg_value(op[0]).i64)
1556    # mod
1557    when "FMOD2_PREF_V8"
1558      handle_fmod2_v8(vreg_value(op[0]).f32)
1559    when "FMOD2_64_V8"
1560      handle_fmod2_64_v8(vreg_value(op[0]).f64)
1561    when "MOD_V4_V4"
1562      handle_mod_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1563    when "MOD2_V8"
1564      handle_mod2_v8(vreg_value(op[0]).i32)
1565    when "MODI_IMM8"
1566      handle_modi_imm(i8toi32(as_imm(op[0])))
1567    when "MOD2_64_V8"
1568      handle_mod2_64_v8(vreg_value(op[0]).i64)
1569    when "MODU2_PREF_V8"
1570      handle_modu2_v8(vreg_value(op[0]).i32)
1571    when "MODU2_64_PREF_V8"
1572      handle_modu2_64_v8(vreg_value(op[0]).i64)
1573    # neg
1574    when "FNEG_64"
1575      handle_fneg_64()
1576    when "FNEG_PREF_NONE"
1577      handle_fneg()
1578    # and
1579    when "AND2_PREF_V8"
1580      handle_and2_v8(vreg_value(op[0]).i64)
1581    when "AND2_64_PREF_V8"
1582      handle_and2_64_v8(vreg_value(op[0]).i64)
1583    when "ANDI_IMM32"
1584      handle_andi_imm(as_imm(op[0]))
1585    when "AND_PREF_V4_V4"
1586      handle_and_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1587    # or
1588    when "OR2_PREF_V8"
1589      handle_or2_v8(vreg_value(op[0]).i32)
1590    when "OR2_64_PREF_V8"
1591      handle_or2_64_v8(vreg_value(op[0]).i64)
1592    when "ORI_IMM32"
1593      handle_ori_imm(as_imm(op[0]))
1594    when "OR_PREF_V4_V4"
1595      handle_or_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1596    when "ASHR2_PREF_V8"
1597      handle_ashr2_v8(vreg_value(op[0]).i32)
1598    when "ASHR2_64_PREF_V8"
1599      handle_ashr2_64_v8(vreg_value(op[0]).i64)
1600    when "ASHRI_IMM8"
1601      handle_ashri_imm(as_imm(op[0]))
1602    when "ASHR_PREF_V4_V4"
1603      handle_ashr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1604    # shr
1605    when "SHRI_IMM8"
1606      handle_shri_imm(i8toi32(as_imm(op[0])))
1607    when "SHR_PREF_V4_V4"
1608      handle_shr_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1609    when "SHR2_PREF_V8"
1610      handle_shr2_v8(vreg_value(op[0]).i32)
1611    when "SHR2_64_PREF_V8"
1612      handle_shr2_64_v8(vreg_value(op[0]).i64)
1613    # xor
1614    when "XOR2_PREF_V8"
1615      handle_xor2_v8(vreg_value(op[0]).i32)
1616    when "XOR2_64_PREF_V8"
1617      handle_xor2_64_v8(vreg_value(op[0]).i64)
1618    when "XORI_PREF_IMM32"
1619      handle_xori_imm(as_imm(op[0]))
1620    when "XOR_PREF_V4_V4"
1621      handle_xor_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1622    # shl
1623    when "SHLI_IMM8"
1624      handle_shli_imm(i8toi32(as_imm(op[0])))
1625    when "SHL_PREF_V4_V4"
1626      handle_shl_v4_v4(vreg_value(op[0]).i32, vreg_value(op[1]).i32)
1627    when "SHL2_PREF_V8"
1628      handle_shl2_v8(vreg_value(op[0]).i32)
1629    when "SHL2_64_PREF_V8"
1630      handle_shl2_64_v8(vreg_value(op[0]).i64)
1631    when "NOT_PREF_NONE"
1632      handle_not()
1633    when "NOT_64_PREF_NONE"
1634      handle_not_64()
1635    when "NEG"
1636      handle_neg()
1637    when "NEG_64"
1638      handle_neg_64()
1639    # new
1640    when "NEWARR_V4_V4_ID16"
1641      handle_newarr_v4_v4_id16(vreg_ptr(op[0]), vreg_value(op[1]), as_id(op[2]))
1642    when "NEWOBJ_V8_ID16"
1643      handle_newobj_v8_id16(vreg_ptr(op[0]), as_id(op[1]))
1644    # checks
1645    when "ISINSTANCE_ID16"
1646      handle_isinstance_id16(as_id(op[0]))
1647    when "CHECKCAST_ID16"
1648      handle_checkcast_id16(as_id(op[0]))
1649    # cast
1650    when "I32TOU1_PREF_NONE"
1651      handle_i32tou1()
1652    when "I64TOU1_PREF_NONE"
1653      handle_i64tou1()
1654    when "U32TOU1_PREF_NONE"
1655      handle_u32tou1()
1656    when "U64TOU1_PREF_NONE"
1657      handle_u64tou1()
1658    when "I32TOI64_PREF_NONE"
1659      handle_i32toi64()
1660    when "I32TOI16_PREF_NONE"
1661      handle_i32toi16()
1662    when "I32TOU16_PREF_NONE"
1663      handle_i32tou16()
1664    when "I32TOI8_PREF_NONE"
1665      handle_i32toi8()
1666    when "I32TOU8_PREF_NONE"
1667      handle_i32tou8()
1668    when "I64TOI32_PREF_NONE"
1669      handle_i64toi32()
1670    when "U32TOI64_PREF_NONE"
1671      handle_u32toi64()
1672    when "U32TOI16_PREF_NONE"
1673      handle_u32toi16()
1674    when "U32TOU16_PREF_NONE"
1675      handle_u32tou16()
1676    when "U32TOI8_PREF_NONE"
1677      handle_u32toi8()
1678    when "U32TOU8_PREF_NONE"
1679      handle_u32tou8()
1680    when "U64TOI32_PREF_NONE"
1681      handle_u64toi32()
1682    when "U64TOU32_PREF_NONE"
1683      handle_u64tou32()
1684    when "I32TOF32_PREF_NONE"
1685      handle_i32tof32()
1686    when "I32TOF64_PREF_NONE"
1687      handle_i32tof64()
1688    when "U32TOF32_PREF_NONE"
1689      handle_u32tof32()
1690    when "U32TOF64_PREF_NONE"
1691      handle_u32tof64()
1692    when "I64TOF32_PREF_NONE"
1693      handle_i64tof32()
1694    when "I64TOF64_PREF_NONE"
1695      handle_i64tof64()
1696    when "U64TOF32_PREF_NONE"
1697      handle_u64tof32()
1698    when "U64TOF64_PREF_NONE"
1699      handle_u64tof64()
1700    when "F32TOF64_PREF_NONE"
1701      handle_f32tof64()
1702    when "F32TOI32_PREF_NONE"
1703      handle_f32toi32()
1704    when "F32TOI64_PREF_NONE"
1705      handle_f32toi64()
1706    when "F32TOU32_PREF_NONE"
1707      handle_f32tou32()
1708    when "F32TOU64_PREF_NONE"
1709      handle_f32tou64()
1710    when "F64TOI32_PREF_NONE"
1711      handle_f64toi32()
1712    when "F64TOI64_PREF_NONE"
1713      handle_f64toi64()
1714    when "F64TOU32_PREF_NONE"
1715      handle_f64tou32()
1716    when "F64TOU64_PREF_NONE"
1717      handle_f64tou64()
1718    when "F64TOF32_PREF_NONE"
1719      handle_f64tof32()
1720    # call
1721    when "CALL_SHORT_V4_V4_ID16"
1722      handle_call_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
1723    when "CALL_ACC_SHORT_V4_IMM4_ID16"
1724      handle_call_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
1725    when "CALL_ACC_V4_V4_V4_IMM4_ID16"
1726      handle_call_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
1727    when "CALL_V4_V4_V4_V4_ID16"
1728      handle_call_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
1729    when "CALL_RANGE_V8_ID16"
1730      handle_call_range_v8_id16(op[1], as_id(op[0]), i.format.size)
1731    when "CALL_VIRT_SHORT_V4_V4_ID16"
1732      handle_call_virt_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
1733    when "CALL_VIRT_ACC_SHORT_V4_IMM4_ID16"
1734      handle_call_virt_acc_short_v4_imm4_id16(op[1], as_imm(op[2]), as_id(op[0]), i.format.size)
1735    when "CALL_VIRT_V4_V4_V4_V4_ID16"
1736      handle_call_virt_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
1737    when "CALL_VIRT_ACC_V4_V4_V4_IMM4_ID16"
1738      handle_call_virt_acc_v4_v4_v4_imm4_id16(op[1], op[2], op[3], as_imm(op[4]), as_id(op[0]), i.format.size)
1739    when "CALL_VIRT_RANGE_V8_ID16"
1740      handle_call_virt_range_v8_id16(op[1], as_id(op[0]), i.format.size)
1741    when "INITOBJ_SHORT_V4_V4_ID16"
1742      handle_initobj_short_v4_v4_id16(op[1], op[2], as_id(op[0]), i.format.size)
1743    when "INITOBJ_V4_V4_V4_V4_ID16"
1744      handle_initobj_v4_v4_v4_v4_id16(op[1], op[2], op[3], op[4], as_id(op[0]), i.format.size)
1745    when "INITOBJ_RANGE_V8_ID16"
1746      handle_initobj_range_v8_id16(op[1], as_id(op[0]), i.format.size)
1747    # return
1748    when "RETURN_VOID"
1749      handle_return_void()
1750    when "RETURN"
1751      handle_return()
1752    when "RETURN_64"
1753      handle_return_64()
1754    when "RETURN_OBJ"
1755      handle_return_obj()
1756    when "MONITORENTER_PREF_NONE"
1757      handle_monitorenter()
1758    when "MONITOREXIT_PREF_NONE"
1759      handle_monitorexit()
1760
1761    # dyn
1762    when "MOV_DYN_V8_V8"
1763      set_value(vreg_ptr(op[0]), vreg_value(op[1]).any).any
1764    when "STA_DYN_V8"
1765      set_value(vreg_ptr(op[0]), acc_value.any).any
1766    when "LDA_DYN_V8"
1767      set_value(acc_ptr, vreg_value(op[0]).any).any
1768    when "LDAI_DYN_IMM32"
1769      set_value(acc_ptr, i32toany(as_imm(op[0]).i32)).any
1770    when "FLDAI_DYN_IMM64"
1771      set_value(acc_ptr, f64toany(as_imm(op[0]).f64)).any
1772    when "RETURN_DYN"
1773      Intrinsic(:INTERPRETER_RETURN).ptr
1774
1775include_plugin 'interpreter_main_loop'
1776
1777    else
1778      Intrinsic(:UNREACHABLE).void
1779    end
1780
1781    if (i.properties & ['jump', 'call', 'return']).empty?
1782      pc := advance_pc_imm(pc, i.format.size)
1783    end
1784
1785    LiveOut(pc).DstReg(regmap[:pc]).u64
1786    LiveOut(table).DstReg(regmap[:dispatch]).u64
1787    LiveOut(frame).DstReg(regmap[:frame]).ptr
1788    LiveOut(tr).DstReg(regmap[:tr]).ptr
1789
1790    dispatch(table, pc)
1791    Intrinsic(:UNREACHABLE).ptr if defines.DEBUG
1792    ReturnVoid()
1793  end
1794end
1795
1796Panda.prefixes.each do |p|
1797  function("HANDLE_FAST_#{p.handler_name}",
1798           regmap: handler_regmap,
1799           regalloc_set: $panda_mask,
1800           mode: [:Interpreter],
1801           validate: InterpreterValidation) do
1802    pc := %pc
1803    table := %dispatch
1804    frame := %frame
1805    tr := %tr
1806
1807    secondary_opcode := readbyte(pc, 1)
1808    offset_idx := AddI(u8toword(secondary_opcode)).Imm(Panda.dispatch_table.secondary_opcode_offset(p)).word
1809    offset := Mul(offset_idx, "WordSize()").word
1810    addr := Load(table, offset).ptr
1811
1812    LiveOut(pc).DstReg(regmap[:pc]).ptr
1813    LiveOut(table).DstReg(regmap[:dispatch]).ptr
1814    LiveOut(frame).DstReg(regmap[:frame]).ptr
1815    LiveOut(tr).DstReg(regmap[:tr]).ptr
1816
1817    IndirectJump(addr)
1818    Intrinsic(:UNREACHABLE).void if defines.DEBUG
1819    ReturnVoid()
1820  end
1821end
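# Prefix handlers read the secondary opcode byte, add the prefix's offset into
# the dispatch table and jump to the second-level handler, leaving pc untouched
# so the target handler decodes the full prefixed instruction itself.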
1822
1823function(:HANDLE_FAST_INVALID,
1824         regmap: handler_regmap,
1825         regalloc_set: $panda_mask,
1826         mode: [:Interpreter],
1827         validate: InterpreterValidation) do
1828  Intrinsic(:UNREACHABLE).void
1829end
1830