Lines Matching refs:rFP

77 #define rFP      %edi  macro
140 movl rFP,offThread_curFrame(\_reg)
144 movl rSELF,rFP
145 movl offThread_pc(rFP),rPC
146 movl offThread_curFrame(rFP),rFP
163 movl rPC, (-sizeofStackSaveArea + offStackSaveArea_currentPc)(rFP)
167 movl (-sizeofStackSaveArea + offStackSaveArea_currentPc)(rFP), rPC
176 leal -sizeofStackSaveArea(rFP), \_reg
251 movl (rFP,\_vreg,4),\_reg
255 movl \_reg,(rFP,\_vreg,4)
259 movl 4*(\_offset)(rFP,\_vreg,4),\_reg
263 movl \_reg,4*(\_offset)(rFP,\_vreg,4)
651 leal (rFP,%ecx,4),%ecx # dst addr
1391 leal (rFP,%ecx,4),%esi # set up src ptr
1613 fldl (rFP,%eax,4)
1614 fldl (rFP,%ecx,4)
1616 flds (rFP,%eax,4)
1617 flds (rFP,%ecx,4)
1647 fldl (rFP,%eax,4)
1648 fldl (rFP,%ecx,4)
1650 flds (rFP,%eax,4)
1651 flds (rFP,%ecx,4)
1681 fldl (rFP,%eax,4)
1682 fldl (rFP,%ecx,4)
1684 flds (rFP,%eax,4)
1685 flds (rFP,%ecx,4)
1714 fldl (rFP,%eax,4)
1715 fldl (rFP,%ecx,4)
1717 flds (rFP,%eax,4)
1718 flds (rFP,%ecx,4)
1753 cmpl 4(rFP,rIBASE,4),%eax
1756 sub (rFP,rIBASE,4),%ecx
1798 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1831 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1864 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1897 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1930 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1963 cmpl (rFP,rINST,4),%eax # compare (vA, vB)
1991 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
2020 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
2049 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
2078 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
2107 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
2136 cmpl $0,(rFP,rINST,4) # compare (vA, 0)
4809 flds (rFP,rINST,4) # %st0<- vB
4812 fstps (rFP,%ecx,4) # vA<- %st0
4828 fldl (rFP,rINST,4) # %st0<- vB
4831 fstpl (rFP,%ecx,4) # vA<- %st0
4864 fildl (rFP,rINST,4) # %st0<- vB
4867 fstps (rFP,%ecx,4) # vA<- %st0
4883 fildl (rFP,rINST,4) # %st0<- vB
4886 fstpl (rFP,%ecx,4) # vA<- %st0
4919 fildll (rFP,rINST,4) # %st0<- vB
4922 fstps (rFP,%ecx,4) # vA<- %st0
4938 fildll (rFP,rINST,4) # %st0<- vB
4941 fstpl (rFP,%ecx,4) # vA<- %st0
4962 fldl (rFP,rINST,4) # %st0<- vB
4964 flds (rFP,rINST,4) # %st0<- vB
4974 fistpll (rFP,%ecx,4) # convert and store
4976 fistpl (rFP,%ecx,4) # convert and store
4981 xorl 4(rFP,%ecx,4),%eax
4982 orl (rFP,%ecx,4),%eax
4984 cmpl $0x80000000,(rFP,%ecx,4)
4997 adcl $-1,(rFP,%ecx,4)
4999 adcl $-1,4(rFP,%ecx,4)
5003 movl $0,(rFP,%ecx,4)
5005 movl $0,4(rFP,%ecx,4)
5025 fldl (rFP,rINST,4) # %st0<- vB
5027 flds (rFP,rINST,4) # %st0<- vB
5037 fistpll (rFP,%ecx,4) # convert and store
5039 fistpl (rFP,%ecx,4) # convert and store
5044 xorl 4(rFP,%ecx,4),%eax
5045 orl (rFP,%ecx,4),%eax
5047 cmpl $0x80000000,(rFP,%ecx,4)
5060 adcl $-1,(rFP,%ecx,4)
5062 adcl $-1,4(rFP,%ecx,4)
5066 movl $0,(rFP,%ecx,4)
5068 movl $0,4(rFP,%ecx,4)
5083 flds (rFP,rINST,4) # %st0<- vB
5086 fstpl (rFP,%ecx,4) # vA<- %st0
5107 fldl (rFP,rINST,4) # %st0<- vB
5109 flds (rFP,rINST,4) # %st0<- vB
5119 fistpll (rFP,%ecx,4) # convert and store
5121 fistpl (rFP,%ecx,4) # convert and store
5126 xorl 4(rFP,%ecx,4),%eax
5127 orl (rFP,%ecx,4),%eax
5129 cmpl $0x80000000,(rFP,%ecx,4)
5142 adcl $-1,(rFP,%ecx,4)
5144 adcl $-1,4(rFP,%ecx,4)
5148 movl $0,(rFP,%ecx,4)
5150 movl $0,4(rFP,%ecx,4)
5170 fldl (rFP,rINST,4) # %st0<- vB
5172 flds (rFP,rINST,4) # %st0<- vB
5182 fistpll (rFP,%ecx,4) # convert and store
5184 fistpl (rFP,%ecx,4) # convert and store
5189 xorl 4(rFP,%ecx,4),%eax
5190 orl (rFP,%ecx,4),%eax
5192 cmpl $0x80000000,(rFP,%ecx,4)
5205 adcl $-1,(rFP,%ecx,4)
5207 adcl $-1,4(rFP,%ecx,4)
5211 movl $0,(rFP,%ecx,4)
5213 movl $0,4(rFP,%ecx,4)
5228 fldl (rFP,rINST,4) # %st0<- vB
5231 fstps (rFP,%ecx,4) # vA<- %st0
5320 addl (rFP,%ecx,4),%eax # ex: addl (rFP,%ecx,4),%eax
5344 subl (rFP,%ecx,4),%eax # ex: addl (rFP,%ecx,4),%eax
5362 imull (rFP,%ecx,4),%eax # trashes rIBASE/edx
5462 andl (rFP,%ecx,4),%eax # ex: addl (rFP,%ecx,4),%eax
5486 orl (rFP,%ecx,4),%eax # ex: addl (rFP,%ecx,4),%eax
5510 xorl (rFP,%ecx,4),%eax # ex: addl (rFP,%ecx,4),%eax
5591 addl (rFP,%ecx,4),rIBASE # ex: addl (rFP,%ecx,4),rIBASE
5592 adcl 4(rFP,%ecx,4),%eax # ex: adcl 4(rFP,%ecx,4),%eax
5615 subl (rFP,%ecx,4),rIBASE # ex: addl (rFP,%ecx,4),rIBASE
5616 sbbl 4(rFP,%ecx,4),%eax # ex: adcl 4(rFP,%ecx,4),%eax
5642 SPILL(rFP)
5645 leal (rFP,%eax,4),%esi # esi<- &v[B]
5646 leal (rFP,%ecx,4),rFP # rFP<- &v[C]
5648 imull (rFP),%ecx # ecx<- (Bmsw*Clsw)
5649 movl 4(rFP),%eax # eax<- Cmsw
5652 movl (rFP),%eax # eax<- Clsw
5655 UNSPILL(rFP)
5659 movl rIBASE,4(rFP,rINST,4)# v[B+1]<- rIBASE
5661 movl %eax,(rFP,rINST,4) # v[B]<- %eax
5777 andl (rFP,%ecx,4),rIBASE # ex: addl (rFP,%ecx,4),rIBASE
5778 andl 4(rFP,%ecx,4),%eax # ex: adcl 4(rFP,%ecx,4),%eax
5801 orl (rFP,%ecx,4),rIBASE # ex: addl (rFP,%ecx,4),rIBASE
5802 orl 4(rFP,%ecx,4),%eax # ex: adcl 4(rFP,%ecx,4),%eax
5825 xorl (rFP,%ecx,4),rIBASE # ex: addl (rFP,%ecx,4),rIBASE
5826 xorl 4(rFP,%ecx,4),%eax # ex: adcl 4(rFP,%ecx,4),%eax
5952 flds (rFP,%eax,4) # vCC to fp stack
5953 fadds (rFP,%ecx,4) # ex: faddp
5956 fstps (rFP,rINST,4) # %st to vAA
5972 flds (rFP,%eax,4) # vCC to fp stack
5973 fsubs (rFP,%ecx,4) # ex: faddp
5976 fstps (rFP,rINST,4) # %st to vAA
5992 flds (rFP,%eax,4) # vCC to fp stack
5993 fmuls (rFP,%ecx,4) # ex: faddp
5996 fstps (rFP,rINST,4) # %st to vAA
6012 flds (rFP,%eax,4) # vCC to fp stack
6013 fdivs (rFP,%ecx,4) # ex: faddp
6016 fstps (rFP,rINST,4) # %st to vAA
6026 flds (rFP,%ecx,4) # vCC to fp stack
6027 flds (rFP,%eax,4) # vCC to fp stack
6037 fstps (rFP,%ecx,4) # %st to vAA
6049 movq (rFP, %eax, 4), %xmm0 # %xmm0<- vBB
6050 movq (rFP, %ecx, 4), %xmm1 # %xmm1<- vCC
6054 movq %xmm0, (rFP, rINST, 4) # vAA<- vBB * vCC
6068 movq (rFP, %eax, 4), %xmm0 # %xmm0<- vBB
6069 movq (rFP, %ecx, 4), %xmm1 # %xmm1<- vCC
6073 movq %xmm0, (rFP, rINST, 4) # vAA<- vBB - vCC
6086 movq (rFP, %eax, 4), %xmm0 # %xmm0<- vBB
6087 movq (rFP, %ecx, 4), %xmm1 # %xmm1<- vCC
6091 movq %xmm0, (rFP, rINST, 4) # vAA<- vBB * vCC
6106 fldl (rFP,%eax,4) # vCC to fp stack
6107 fdivl (rFP,%ecx,4) # ex: faddp
6110 fstpl (rFP,rINST,4) # %st to vAA
6120 fldl (rFP,%ecx,4) # vCC to fp stack
6121 fldl (rFP,%eax,4) # vCC to fp stack
6130 fstpl (rFP,rINST,4) # %st to vAA
6152 addl %eax,(rFP,%ecx,4) # for ex: addl %eax,(rFP,%ecx,4)
6177 subl %eax,(rFP,%ecx,4) # for ex: addl %eax,(rFP,%ecx,4)
6192 imull (rFP,%ecx,4),%eax # trashes rIBASE/edx
6294 andl %eax,(rFP,%ecx,4) # for ex: addl %eax,(rFP,%ecx,4)
6319 orl %eax,(rFP,%ecx,4) # for ex: addl %eax,(rFP,%ecx,4)
6344 xorl %eax,(rFP,%ecx,4) # for ex: addl %eax,(rFP,%ecx,4)
6423 addl %eax,(rFP,rINST,4) # example: addl %eax,(rFP,rINST,4)
6424 adcl %ecx,4(rFP,rINST,4) # example: adcl %ecx,4(rFP,rINST,4)
6443 subl %eax,(rFP,rINST,4) # example: addl %eax,(rFP,rINST,4)
6444 sbbl %ecx,4(rFP,rINST,4) # example: adcl %ecx,4(rFP,rINST,4)
6469 SPILL(rFP)
6471 leal (rFP,%eax,4),%esi # %esi<- &v[A]
6472 leal (rFP,rINST,4),rFP # rFP<- &v[B]
6474 imull (rFP),%ecx # ecx<- (Amsw*Blsw)
6475 movl 4(rFP),%eax # eax<- Bmsw
6478 movl (rFP),%eax # eax<- Blsw
6486 UNSPILL(rFP)
6603 andl %eax,(rFP,rINST,4) # example: addl %eax,(rFP,rINST,4)
6604 andl %ecx,4(rFP,rINST,4) # example: adcl %ecx,4(rFP,rINST,4)
6623 orl %eax,(rFP,rINST,4) # example: addl %eax,(rFP,rINST,4)
6624 orl %ecx,4(rFP,rINST,4) # example: adcl %ecx,4(rFP,rINST,4)
6643 xorl %eax,(rFP,rINST,4) # example: addl %eax,(rFP,rINST,4)
6644 xorl %ecx,4(rFP,rINST,4) # example: adcl %ecx,4(rFP,rINST,4)
6759 flds (rFP,%ecx,4) # vAA to fp stack
6761 fadds (rFP,rINST,4) # ex: faddp
6764 fstps (rFP,%ecx,4) # %st to vA
6781 flds (rFP,%ecx,4) # vAA to fp stack
6783 fsubs (rFP,rINST,4) # ex: faddp
6786 fstps (rFP,%ecx,4) # %st to vA
6803 flds (rFP,%ecx,4) # vAA to fp stack
6805 fmuls (rFP,rINST,4) # ex: faddp
6808 fstps (rFP,%ecx,4) # %st to vA
6825 flds (rFP,%ecx,4) # vAA to fp stack
6827 fdivs (rFP,rINST,4) # ex: faddp
6830 fstps (rFP,%ecx,4) # %st to vA
6840 flds (rFP,rINST,4) # vBB to fp stack
6842 flds (rFP,%ecx,4) # vAA to fp stack
6851 fstps (rFP,%ecx,4) # %st to vA
6864 movq (rFP, rINST, 4), %xmm1 # %xmm1<- vB
6865 movq (rFP, %ecx, 4), %xmm0 # %xmm0<- vA
6869 movq %xmm0, (rFP, %ecx, 4) # vA<- %xmm0; result
6883 movq (rFP, rINST, 4), %xmm1 # %xmm1<- vB
6884 movq (rFP, %ecx, 4), %xmm0 # %xmm0<- vA
6888 movq %xmm0, (rFP, %ecx, 4) # vA<- %xmm0; result
6902 movq (rFP, rINST, 4), %xmm1 # %xmm1<- vB
6903 movq (rFP, %ecx, 4), %xmm0 # %xmm0<- vA
6907 movq %xmm0, (rFP, %ecx, 4) # vA<- %xmm0; result
6923 fldl (rFP,%ecx,4) # vAA to fp stack
6925 fdivl (rFP,rINST,4) # ex: faddp
6928 fstpl (rFP,%ecx,4) # %st to vA
6938 fldl (rFP,rINST,4) # vBB to fp stack
6940 fldl (rFP,%ecx,4) # vAA to fp stack
6949 fstpl (rFP,%ecx,4) # %st to vA
8401 movl rFP, OUT_ARG1(%esp)
8427 movl rFP, OUT_ARG1(%esp)
8453 movl rFP, OUT_ARG1(%esp)
8479 movl rFP, OUT_ARG1(%esp)
8505 movl rFP, OUT_ARG1(%esp)
8531 movl rFP, OUT_ARG1(%esp)
8557 movl rFP, OUT_ARG1(%esp)
8583 movl rFP, OUT_ARG1(%esp)
8609 movl rFP, OUT_ARG1(%esp)
8635 movl rFP, OUT_ARG1(%esp)
8661 movl rFP, OUT_ARG1(%esp)
8687 movl rFP, OUT_ARG1(%esp)
8713 movl rFP, OUT_ARG1(%esp)
8739 movl rFP, OUT_ARG1(%esp)
8765 movl rFP, OUT_ARG1(%esp)
8791 movl rFP, OUT_ARG1(%esp)
8817 movl rFP, OUT_ARG1(%esp)
8843 movl rFP, OUT_ARG1(%esp)
8869 movl rFP, OUT_ARG1(%esp)
8895 movl rFP, OUT_ARG1(%esp)
8921 movl rFP, OUT_ARG1(%esp)
8947 movl rFP, OUT_ARG1(%esp)
8973 movl rFP, OUT_ARG1(%esp)
8999 movl rFP, OUT_ARG1(%esp)
9025 movl rFP, OUT_ARG1(%esp)
9051 movl rFP, OUT_ARG1(%esp)
9077 movl rFP, OUT_ARG1(%esp)
9103 movl rFP, OUT_ARG1(%esp)
9129 movl rFP, OUT_ARG1(%esp)
9155 movl rFP, OUT_ARG1(%esp)
9181 movl rFP, OUT_ARG1(%esp)
9207 movl rFP, OUT_ARG1(%esp)
9233 movl rFP, OUT_ARG1(%esp)
9259 movl rFP, OUT_ARG1(%esp)
9285 movl rFP, OUT_ARG1(%esp)
9311 movl rFP, OUT_ARG1(%esp)
9337 movl rFP, OUT_ARG1(%esp)
9363 movl rFP, OUT_ARG1(%esp)
9389 movl rFP, OUT_ARG1(%esp)
9415 movl rFP, OUT_ARG1(%esp)
9441 movl rFP, OUT_ARG1(%esp)
9467 movl rFP, OUT_ARG1(%esp)
9493 movl rFP, OUT_ARG1(%esp)
9519 movl rFP, OUT_ARG1(%esp)
9545 movl rFP, OUT_ARG1(%esp)
9571 movl rFP, OUT_ARG1(%esp)
9597 movl rFP, OUT_ARG1(%esp)
9623 movl rFP, OUT_ARG1(%esp)
9649 movl rFP, OUT_ARG1(%esp)
9675 movl rFP, OUT_ARG1(%esp)
9701 movl rFP, OUT_ARG1(%esp)
9727 movl rFP, OUT_ARG1(%esp)
9753 movl rFP, OUT_ARG1(%esp)
9779 movl rFP, OUT_ARG1(%esp)
9805 movl rFP, OUT_ARG1(%esp)
9831 movl rFP, OUT_ARG1(%esp)
9857 movl rFP, OUT_ARG1(%esp)
9883 movl rFP, OUT_ARG1(%esp)
9909 movl rFP, OUT_ARG1(%esp)
9935 movl rFP, OUT_ARG1(%esp)
9961 movl rFP, OUT_ARG1(%esp)
9987 movl rFP, OUT_ARG1(%esp)
10013 movl rFP, OUT_ARG1(%esp)
10039 movl rFP, OUT_ARG1(%esp)
10065 movl rFP, OUT_ARG1(%esp)
10091 movl rFP, OUT_ARG1(%esp)
10117 movl rFP, OUT_ARG1(%esp)
10143 movl rFP, OUT_ARG1(%esp)
10169 movl rFP, OUT_ARG1(%esp)
10195 movl rFP, OUT_ARG1(%esp)
10221 movl rFP, OUT_ARG1(%esp)
10247 movl rFP, OUT_ARG1(%esp)
10273 movl rFP, OUT_ARG1(%esp)
10299 movl rFP, OUT_ARG1(%esp)
10325 movl rFP, OUT_ARG1(%esp)
10351 movl rFP, OUT_ARG1(%esp)
10377 movl rFP, OUT_ARG1(%esp)
10403 movl rFP, OUT_ARG1(%esp)
10429 movl rFP, OUT_ARG1(%esp)
10455 movl rFP, OUT_ARG1(%esp)
10481 movl rFP, OUT_ARG1(%esp)
10507 movl rFP, OUT_ARG1(%esp)
10533 movl rFP, OUT_ARG1(%esp)
10559 movl rFP, OUT_ARG1(%esp)
10585 movl rFP, OUT_ARG1(%esp)
10611 movl rFP, OUT_ARG1(%esp)
10637 movl rFP, OUT_ARG1(%esp)
10663 movl rFP, OUT_ARG1(%esp)
10689 movl rFP, OUT_ARG1(%esp)
10715 movl rFP, OUT_ARG1(%esp)
10741 movl rFP, OUT_ARG1(%esp)
10767 movl rFP, OUT_ARG1(%esp)
10793 movl rFP, OUT_ARG1(%esp)
10819 movl rFP, OUT_ARG1(%esp)
10845 movl rFP, OUT_ARG1(%esp)
10871 movl rFP, OUT_ARG1(%esp)
10897 movl rFP, OUT_ARG1(%esp)
10923 movl rFP, OUT_ARG1(%esp)
10949 movl rFP, OUT_ARG1(%esp)
10975 movl rFP, OUT_ARG1(%esp)
11001 movl rFP, OUT_ARG1(%esp)
11027 movl rFP, OUT_ARG1(%esp)
11053 movl rFP, OUT_ARG1(%esp)
11079 movl rFP, OUT_ARG1(%esp)
11105 movl rFP, OUT_ARG1(%esp)
11131 movl rFP, OUT_ARG1(%esp)
11157 movl rFP, OUT_ARG1(%esp)
11183 movl rFP, OUT_ARG1(%esp)
11209 movl rFP, OUT_ARG1(%esp)
11235 movl rFP, OUT_ARG1(%esp)
11261 movl rFP, OUT_ARG1(%esp)
11287 movl rFP, OUT_ARG1(%esp)
11313 movl rFP, OUT_ARG1(%esp)
11339 movl rFP, OUT_ARG1(%esp)
11365 movl rFP, OUT_ARG1(%esp)
11391 movl rFP, OUT_ARG1(%esp)
11417 movl rFP, OUT_ARG1(%esp)
11443 movl rFP, OUT_ARG1(%esp)
11469 movl rFP, OUT_ARG1(%esp)
11495 movl rFP, OUT_ARG1(%esp)
11521 movl rFP, OUT_ARG1(%esp)
11547 movl rFP, OUT_ARG1(%esp)
11573 movl rFP, OUT_ARG1(%esp)
11599 movl rFP, OUT_ARG1(%esp)
11625 movl rFP, OUT_ARG1(%esp)
11651 movl rFP, OUT_ARG1(%esp)
11677 movl rFP, OUT_ARG1(%esp)
11703 movl rFP, OUT_ARG1(%esp)
11729 movl rFP, OUT_ARG1(%esp)
11755 movl rFP, OUT_ARG1(%esp)
11781 movl rFP, OUT_ARG1(%esp)
11807 movl rFP, OUT_ARG1(%esp)
11833 movl rFP, OUT_ARG1(%esp)
11859 movl rFP, OUT_ARG1(%esp)
11885 movl rFP, OUT_ARG1(%esp)
11911 movl rFP, OUT_ARG1(%esp)
11937 movl rFP, OUT_ARG1(%esp)
11963 movl rFP, OUT_ARG1(%esp)
11989 movl rFP, OUT_ARG1(%esp)
12015 movl rFP, OUT_ARG1(%esp)
12041 movl rFP, OUT_ARG1(%esp)
12067 movl rFP, OUT_ARG1(%esp)
12093 movl rFP, OUT_ARG1(%esp)
12119 movl rFP, OUT_ARG1(%esp)
12145 movl rFP, OUT_ARG1(%esp)
12171 movl rFP, OUT_ARG1(%esp)
12197 movl rFP, OUT_ARG1(%esp)
12223 movl rFP, OUT_ARG1(%esp)
12249 movl rFP, OUT_ARG1(%esp)
12275 movl rFP, OUT_ARG1(%esp)
12301 movl rFP, OUT_ARG1(%esp)
12327 movl rFP, OUT_ARG1(%esp)
12353 movl rFP, OUT_ARG1(%esp)
12379 movl rFP, OUT_ARG1(%esp)
12405 movl rFP, OUT_ARG1(%esp)
12431 movl rFP, OUT_ARG1(%esp)
12457 movl rFP, OUT_ARG1(%esp)
12483 movl rFP, OUT_ARG1(%esp)
12509 movl rFP, OUT_ARG1(%esp)
12535 movl rFP, OUT_ARG1(%esp)
12561 movl rFP, OUT_ARG1(%esp)
12587 movl rFP, OUT_ARG1(%esp)
12613 movl rFP, OUT_ARG1(%esp)
12639 movl rFP, OUT_ARG1(%esp)
12665 movl rFP, OUT_ARG1(%esp)
12691 movl rFP, OUT_ARG1(%esp)
12717 movl rFP, OUT_ARG1(%esp)
12743 movl rFP, OUT_ARG1(%esp)
12769 movl rFP, OUT_ARG1(%esp)
12795 movl rFP, OUT_ARG1(%esp)
12821 movl rFP, OUT_ARG1(%esp)
12847 movl rFP, OUT_ARG1(%esp)
12873 movl rFP, OUT_ARG1(%esp)
12899 movl rFP, OUT_ARG1(%esp)
12925 movl rFP, OUT_ARG1(%esp)
12951 movl rFP, OUT_ARG1(%esp)
12977 movl rFP, OUT_ARG1(%esp)
13003 movl rFP, OUT_ARG1(%esp)
13029 movl rFP, OUT_ARG1(%esp)
13055 movl rFP, OUT_ARG1(%esp)
13081 movl rFP, OUT_ARG1(%esp)
13107 movl rFP, OUT_ARG1(%esp)
13133 movl rFP, OUT_ARG1(%esp)
13159 movl rFP, OUT_ARG1(%esp)
13185 movl rFP, OUT_ARG1(%esp)
13211 movl rFP, OUT_ARG1(%esp)
13237 movl rFP, OUT_ARG1(%esp)
13263 movl rFP, OUT_ARG1(%esp)
13289 movl rFP, OUT_ARG1(%esp)
13315 movl rFP, OUT_ARG1(%esp)
13341 movl rFP, OUT_ARG1(%esp)
13367 movl rFP, OUT_ARG1(%esp)
13393 movl rFP, OUT_ARG1(%esp)
13419 movl rFP, OUT_ARG1(%esp)
13445 movl rFP, OUT_ARG1(%esp)
13471 movl rFP, OUT_ARG1(%esp)
13497 movl rFP, OUT_ARG1(%esp)
13523 movl rFP, OUT_ARG1(%esp)
13549 movl rFP, OUT_ARG1(%esp)
13575 movl rFP, OUT_ARG1(%esp)
13601 movl rFP, OUT_ARG1(%esp)
13627 movl rFP, OUT_ARG1(%esp)
13653 movl rFP, OUT_ARG1(%esp)
13679 movl rFP, OUT_ARG1(%esp)
13705 movl rFP, OUT_ARG1(%esp)
13731 movl rFP, OUT_ARG1(%esp)
13757 movl rFP, OUT_ARG1(%esp)
13783 movl rFP, OUT_ARG1(%esp)
13809 movl rFP, OUT_ARG1(%esp)
13835 movl rFP, OUT_ARG1(%esp)
13861 movl rFP, OUT_ARG1(%esp)
13887 movl rFP, OUT_ARG1(%esp)
13913 movl rFP, OUT_ARG1(%esp)
13939 movl rFP, OUT_ARG1(%esp)
13965 movl rFP, OUT_ARG1(%esp)
13991 movl rFP, OUT_ARG1(%esp)
14017 movl rFP, OUT_ARG1(%esp)
14043 movl rFP, OUT_ARG1(%esp)
14069 movl rFP, OUT_ARG1(%esp)
14095 movl rFP, OUT_ARG1(%esp)
14121 movl rFP, OUT_ARG1(%esp)
14147 movl rFP, OUT_ARG1(%esp)
14173 movl rFP, OUT_ARG1(%esp)
14199 movl rFP, OUT_ARG1(%esp)
14225 movl rFP, OUT_ARG1(%esp)
14251 movl rFP, OUT_ARG1(%esp)
14277 movl rFP, OUT_ARG1(%esp)
14303 movl rFP, OUT_ARG1(%esp)
14329 movl rFP, OUT_ARG1(%esp)
14355 movl rFP, OUT_ARG1(%esp)
14381 movl rFP, OUT_ARG1(%esp)
14407 movl rFP, OUT_ARG1(%esp)
14433 movl rFP, OUT_ARG1(%esp)
14459 movl rFP, OUT_ARG1(%esp)
14485 movl rFP, OUT_ARG1(%esp)
14511 movl rFP, OUT_ARG1(%esp)
14537 movl rFP, OUT_ARG1(%esp)
14563 movl rFP, OUT_ARG1(%esp)
14589 movl rFP, OUT_ARG1(%esp)
14615 movl rFP, OUT_ARG1(%esp)
14641 movl rFP, OUT_ARG1(%esp)
14667 movl rFP, OUT_ARG1(%esp)
14693 movl rFP, OUT_ARG1(%esp)
14719 movl rFP, OUT_ARG1(%esp)
14745 movl rFP, OUT_ARG1(%esp)
14771 movl rFP, OUT_ARG1(%esp)
14797 movl rFP, OUT_ARG1(%esp)
14823 movl rFP, OUT_ARG1(%esp)
14849 movl rFP, OUT_ARG1(%esp)
14875 movl rFP, OUT_ARG1(%esp)
14901 movl rFP, OUT_ARG1(%esp)
14927 movl rFP, OUT_ARG1(%esp)
14953 movl rFP, OUT_ARG1(%esp)
14979 movl rFP, OUT_ARG1(%esp)
15005 movl rFP, OUT_ARG1(%esp)
15031 movl rFP, OUT_ARG1(%esp)
15609 movl offThread_curFrame(%ecx),rFP
16141 movl (rFP, rINST, 4), %ecx # %ecx<- vA
16147 movl (rFP, %ecx, 4), %ecx # %ecx<- vG
16154 movl (rFP, %ecx, 4), %ecx # %ecx<- vF
16161 movl (rFP, %ecx, 4), %ecx # %ecx<- vE
16166 movl (rFP, %ecx, 4), %ecx # %ecx<- vD
16203 movl rFP, offStackSaveArea_prevFrame(%edx) # newSaveArea->prevFrame<- rFP
16227 movl LOCAL1_OFFSET(%ebp), rFP # rFP<- newFP
16228 movl rFP, offThread_curFrame(%ecx) # curFrame<-newFP
16281 movl rFP, offThread_curFrame(%eax) # curFrame<- rFP
16298 movl rFP, OUT_ARG2(%esp)
16315 movl rFP, OUT_ARG2(%esp)
16337 movl offStackSaveArea_prevFrame(%eax), rFP # rFP<- saveArea->PrevFrame
16338 …movl (offStackSaveArea_method - sizeofStackSaveArea)(rFP), rINST # rINST<- method we are re…
16349 movl rFP, offThread_curFrame(%eax) # glue->self->curFrame<- rFP
16410 movl rFP,offThread_curFrame(%ecx)
16485 movl offThread_curFrame(%eax),rFP
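
The matches above all follow one addressing convention: rFP (defined as %edi at source line 77) holds the base of the current frame's array of 32-bit Dalvik virtual registers, so register vN is accessed at (rFP, N, 4), and a wide 64-bit value occupies the two consecutive slots N and N+1 (the paired (rFP,N,4) / 4(rFP,N,4) operands). The C sketch below models that layout for readers scanning the listing; it is an illustration of the pattern, not code from the file, and the helper names (get_vreg, set_vreg, get_vreg_wide, set_vreg_wide) are invented for this note.

    #include <stdint.h>
    #include <string.h>

    /*
     * Minimal model of the addressing seen in the listing, assuming rFP
     * points at the base of the current frame's 32-bit virtual-register
     * array. The x86 operand (rFP, N, 4) is then simply &fp[N]; a wide
     * value spans fp[N] (low word) and fp[N+1] (high word), matching the
     * (rFP,N,4) / 4(rFP,N,4) instruction pairs.
     */
    typedef uint32_t vreg_t;

    /* 32-bit access: the GET_VREG / SET_VREG pattern, movl (rFP,\_vreg,4) */
    static inline uint32_t get_vreg(const vreg_t *fp, unsigned n)        { return fp[n]; }
    static inline void     set_vreg(vreg_t *fp, unsigned n, uint32_t v)  { fp[n] = v; }

    /* 64-bit access across two consecutive slots */
    static inline uint64_t get_vreg_wide(const vreg_t *fp, unsigned n)
    {
        uint64_t v;
        memcpy(&v, &fp[n], sizeof v);   /* low word at n, high word at n+1 */
        return v;
    }
    static inline void set_vreg_wide(vreg_t *fp, unsigned n, uint64_t v)
    {
        memcpy(&fp[n], &v, sizeof v);
    }

Read this way, a handler such as the addl (rFP,%ecx,4),%eax matches corresponds to set_vreg(fp, A, get_vreg(fp, B) + get_vreg(fp, C)), and the addl/adcl pairs on (rFP,N,4) and 4(rFP,N,4) are the wide-register equivalent.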