#!/usr/bin/env ruby

# Copyright (c) 2021-2025 Huawei Device Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

include_relative 'common.irt'

if Options.arch == :arm64
    # On AArch64 six registers are enough: use 5 callee-saved regs and 1 temp reg.
    $monitors_mask = RegMask.new($full_regmap, :tmp1, :callee0, :callee1, :callee2, :callee3, :callee4)
else
    $monitors_mask = $panda_mask
end

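# Fast path for monitor enter: take (or recursively re-take) the object's lightweight
# lock by updating its mark word in a CAS loop, and record the locked object in the
# current thread's local_objects_locked_ list. Anything the fast path cannot handle
# inline (an unknown thread id, a full list, a non-lightweight lock state, a lock
# owned by another thread, a counter overflow) is delegated to the slow path.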
function(:MonitorEnterFastPath,
          params: {monitor: 'ptr'},
          regmap: $full_regmap,
          regalloc_set: $monitors_mask,
          mode: [:FastPath]) {
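    # There is no fast-path implementation for arm32, so this code must never be entered there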
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Load the native thread id; go to the slow path if it is unknown
    ntid := Cast(LoadI(%tr).Imm(Constants::INTERNAL_THREAD_ID_OFFSET).u32).SrcType("DataType::Type::UINT32").mw
    If(ntid, 0).EQ.mw.Unlikely { Goto(:SlowPath) }

    # Load the size and capacity of MTManagedThread::local_objects_locked_; go to the slow path if the list would need to be extended
    locked_list_capacity := LoadI(%tr).Imm(Constants::LOCKED_OBJECTS_CAPACITY_OFFSET).u32
    locked_list_size := LoadI(%tr).Imm(Constants::LOCKED_OBJECTS_SIZE_OFFSET).u32
    If(locked_list_capacity, locked_list_size).LE.u32.Unlikely { Goto(:SlowPath) }

Label(:CasLoop)
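    # On x86_64 a plain load is sufficient, since the compare-and-set below detects
    # concurrent updates; on AArch64 the mark word is loaded with acquire semantics as
    # the exclusive (load-linked) half of an LL/SC pair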
    if Options.arch == :x86_64
        mark_word := LoadI(monitor).Imm(Constants::MARK_WORD_OFFSET).mw
    else
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        mark_word := Intrinsic(:LOAD_ACQUIRE_MARK_WORD_EXCLUSIVE, addr).mw
    end

    # Go to the slow path if the mark word's state is neither Unlocked nor Lightweight lock
    If(And(mark_word, Constants::MARK_WORD_STATUS_MASK).mw, 0).NE.mw.Unlikely { Goto(:SlowPath) }

    # If both the thread id and the locks count fields are zero, set the thread id to the current thread's id
    If(And(mark_word, Constants::MARK_WORD_LWL_THREAD_ID_AND_COUNTER_MASK).mw, 0).EQ.mw.Unlikely {
        shifted_ntid := Shl(ntid, Constants::MARK_WORD_LWL_THREAD_ID_OFFSET).mw
        new_mark_word := Or(mark_word, shifted_ntid).mw
    }
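    # Merge the two paths: either the original mark word (some thread already holds
    # the lock) or the mark word with the current thread id installed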
    phi_mw := Phi(mark_word, new_mark_word).mw

    # Increment the locks count by 1
    final_mark_word := Add(phi_mw, Constants::MARK_WORD_LWL_COUNTER_INC).mw
    # Go to the slow path unless the thread id field still equals the current thread id;
    # this catches both a counter overflow into the thread id field and a lock that is
    # owned by another thread
    If(Shr(final_mark_word, Constants::MARK_WORD_LWL_THREAD_ID_OFFSET).mw, ntid).NE.mw.Unlikely {
        Goto(:SlowPath)
    }
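    # Both update intrinsics report failure as a value: COMPARE_AND_SET_MARK_WORD yields 0
    # when the word changed concurrently, and the store-release exclusive yields a non-zero
    # status when it loses exclusivity; in either case the loop restarts from a fresh load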
    # Try to update mark word, retry on failure
    if Options.arch == :x86_64
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        If(Intrinsic(:COMPARE_AND_SET_MARK_WORD, addr, mark_word, final_mark_word).b, 0).EQ.b {
            Goto(:CasLoop)
        }
    else
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        If(Intrinsic(:STORE_RELEASE_MARK_WORD_EXCLUSIVE, addr, final_mark_word).b, 0).NE.b {
            Goto(:CasLoop)
        }
    end

    # Push a new LockedObjectInfo entry onto MTManagedThread's local_objects_locked_ list
    locked_objects_base := LoadI(%tr).Imm(Constants::LOCKED_OBJECTS_DATA_OFFSET).ptr
    locked_objects_offset_u32 := Mul(locked_list_size, Constants::LOCKED_OBJECT_INFO_SIZE).u32
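    # Widen the byte offset to pointer width before the address arithmetic on 64-bit targets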
    if Options.arch_64_bits?
        locked_objects_offset := Cast(locked_objects_offset_u32).SrcType("DataType::Type::UINT32").u64
    else
        locked_objects_offset := locked_objects_offset_u32
    end
    locked_objects_addr := Add(locked_objects_base, locked_objects_offset).ptr
    StoreI(locked_objects_addr, monitor).Imm(Constants::LOCKED_OBJECT_INFO_MONITOR_OFFSET).ptr
    StoreI(locked_objects_addr, %fp).Imm(Constants::LOCKED_OBJECT_INFO_FRAME_OFFSET).ptr

    # Increment local_objects_locked_ size
    new_len := Add(locked_list_size, 1).u32
    StoreI(%tr, new_len).Imm(Constants::LOCKED_OBJECTS_SIZE_OFFSET).u32
    Goto(:Exit)

Label(:SlowPath)
    ep_offset = get_entrypoint_offset("MONITOR_ENTER_SLOW_PATH")
    Intrinsic(:SLOW_PATH_ENTRY, monitor).AddImm(ep_offset).MethodAsImm("MonitorEnterOddSavedBridge").Terminator.void
    Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
Label(:Exit)
    ReturnVoid().void
}

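# Fast path for monitor exit: release one level of the lightweight lock held by the
# current thread by decrementing the mark word's lock counter (clearing the thread id
# field once the counter reaches zero) and popping the object from the thread's
# local_objects_locked_ list. All other states are delegated to the slow path.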
function(:MonitorExitFastPath,
          params: {monitor: 'ptr'},
          regmap: $full_regmap,
          regalloc_set: $monitors_mask,
          mode: [:FastPath]) {
    if Options.arch == :arm32
        Intrinsic(:UNREACHABLE).Terminator.void
        next
    end

    # Load the native thread id; go to the slow path if it is unknown
    ntid := Cast(LoadI(%tr).Imm(Constants::INTERNAL_THREAD_ID_OFFSET).u32).SrcType("DataType::Type::UINT32").mw
    If(ntid, 0).EQ.mw.Unlikely { Goto(:SlowPath) }
    shifted_ntid := Shl(ntid, Constants::MARK_WORD_LWL_THREAD_ID_OFFSET).mw

Label(:CasLoop)
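    # The mark word is loaded and updated with the same discipline as in MonitorEnterFastPath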
    if Options.arch == :x86_64
        mark_word := LoadI(monitor).Imm(Constants::MARK_WORD_OFFSET).mw
    else
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        mark_word := Intrinsic(:LOAD_ACQUIRE_MARK_WORD_EXCLUSIVE, addr).mw
    end

    # XOR the mark word with the shifted current thread id and then check that:
    # 1) the thread id field is zero (i.e. the mark word's thread id is equal to the current thread id);
    # 2) the mark word state is zero too (with a non-zero thread id this corresponds to a Lightweight lock).
    # If either of these conditions does not hold, go to the slow path.
    If(And(Xor(mark_word, shifted_ntid).mw, Constants::MARK_WORD_STATUS_MASK_AND_LWL_THREAD_ID_MASK).mw, 0).NE.mw.Unlikely {
        Goto(:SlowPath)
    }

    # Decrement locks count by 1
    decremented_mw := SubI(mark_word).Imm(Constants::MARK_WORD_LWL_COUNTER_INC).mw

    # Clear the thread id field if locks count was decremented down to 0
    If(And(decremented_mw, Constants::MARK_WORD_LWL_COUNTER_MASK).mw, 0).EQ.mw {
        new_mark_word := Xor(decremented_mw, shifted_ntid).mw
    }
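    # Merge the two paths: the decremented mark word, with the thread id field cleared
    # when the lock counter dropped to zero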
    final_mark_word := Phi(decremented_mw, new_mark_word).mw

    # Try to update mark word, retry on failure
    if Options.arch == :x86_64
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        If(Intrinsic(:COMPARE_AND_SET_MARK_WORD, addr, mark_word, final_mark_word).b, 0).EQ.b {
            Goto(:CasLoop)
        }
    else
        addr := Add(monitor, Constants::MARK_WORD_OFFSET).ptr
        If(Intrinsic(:STORE_RELEASE_MARK_WORD_EXCLUSIVE, addr, final_mark_word).b, 0).NE.b {
            Goto(:CasLoop)
        }
    end

    # Decrement the size of MTManagedThread's local_objects_locked_ list, popping its last entry
    locked_list_size := LoadI(%tr).Imm(Constants::LOCKED_OBJECTS_SIZE_OFFSET).u32
    new_len := SubI(locked_list_size).Imm(1).u32
    StoreI(%tr, new_len).Imm(Constants::LOCKED_OBJECTS_SIZE_OFFSET).u32
    Goto(:Exit)

Label(:SlowPath)
    ep_offset = get_entrypoint_offset("MONITOR_EXIT_SLOW_PATH")
    Intrinsic(:SLOW_PATH_ENTRY, monitor).AddImm(ep_offset).MethodAsImm("MonitorExitOddSavedBridge").Terminator.void
    Intrinsic(:UNREACHABLE).Terminator.void if defines.DEBUG
Label(:Exit)
    ReturnVoid().void
}