/**
 * Copyright (c) 2021-2024 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arch/asm_support.h"
#include "arch/amd64/helpers_amd64.S"

.global ExpandCompiledCodeArgsDyn
TYPE_FUNCTION(ExpandCompiledCodeArgsDyn)
ExpandCompiledCodeArgsDyn:
    // %edi - method, preserved
    // %esi - num_actual, preserved
    // %edx - num_expected, tmp1
    // %rcx - tmp2
    // %r8 - tmp3
    // %r9 - tmp4

    // %rdx - -8 * aligned(2, num_expected - num_actual) (negative: the stack grows down)
    // %rcx - num_actual_args + 3 (for fp, lr, lr)
    // %r8 - %rsp before moving
    subl %esi, %edx
    negq %rdx
    andq $-2, %rdx
    shlq $3, %rdx
    leal 3(%esi), %ecx
    movq %rsp, %r8

    // Move sp and fp down to make room for the extra arguments
    addq %rdx, %rsp
    addq %rdx, %rbp

    // Copy the num_actual + 3 qwords starting at the old sp down to the new location
.Lloop_move:
    movq (%r8), %r9
    movq %r9, (%r8, %rdx)
    addq $8, %r8
    subl $1, %ecx
    ja .Lloop_move

    movl $TAGGED_VALUE_UNDEFINED, %r9d
    // Use loop counter as index
    leaq -8(%r8, %rdx), %r8
    negl %edx
    // Fill the freed slots above the copied qwords with TAGGED_VALUE_UNDEFINED
.Lloop_init:
    movq %r9, (%r8, %rdx)
    subl $8, %edx
    ja .Lloop_init

    ret
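
// ---------------------------------------------------------------------------
// Note (sketch only, kept as a comment so it does not affect assembly): a rough
// C equivalent of the rewrite above, assuming a prototype along the lines of
// void ExpandCompiledCodeArgsDyn(Method *method, uint32_t num_actual,
// uint32_t num_expected), which is what the register comments (%edi, %esi, %edx)
// suggest under the SysV AMD64 ABI. The names ExpandArgsSketch, AlignUp2,
// old_sp and undefined_value are hypothetical and exist only for this sketch.
//
//   #include <stddef.h>
//   #include <stdint.h>
//
//   static inline size_t AlignUp2(size_t x) { return (x + 1U) & ~(size_t)1U; }
//
//   // Shifts the num_actual + 3 qwords at old_sp down by the 2-aligned gap and
//   // fills the freed slots with the tagged "undefined" value; returns the new
//   // stack top (the routine above achieves this by adjusting %rsp and %rbp).
//   static uint64_t *ExpandArgsSketch(uint64_t *old_sp, uint32_t num_actual,
//                                     uint32_t num_expected, uint64_t undefined_value) {
//       size_t moved = (size_t)num_actual + 3;             // fp, lr, lr + actual args
//       size_t gap = AlignUp2(num_expected - num_actual);  // extra slots to create
//       uint64_t *new_sp = old_sp - gap;                   // the stack grows down
//       for (size_t i = 0; i < moved; ++i) {               // .Lloop_move
//           new_sp[i] = old_sp[i];
//       }
//       for (size_t i = 0; i < gap; ++i) {                 // .Lloop_init
//           new_sp[moved + i] = undefined_value;
//       }
//       return new_sp;
//   }
// ---------------------------------------------------------------------------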