/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                      dispatch-amd64-darwin.S ---*/
/*--------------------------------------------------------------------*/

/*
  This file is part of Valgrind, a dynamic binary instrumentation
  framework.

  Copyright (C) 2000-2017 Julian Seward
     jseward@acm.org

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  02111-1307, USA.

  The GNU General Public License is contained in the file COPYING.
*/
31
32#include "pub_core_basics_asm.h"
33
34#if defined(VGP_amd64_darwin)
35
36#include "pub_core_dispatch_asm.h"
37#include "pub_core_transtab_asm.h"
38#include "libvex_guest_offsets.h"	/* for OFFSET_amd64_RIP */
39

/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
58.text
59.globl VG_(disp_run_translations)
60VG_(disp_run_translations):
61        /* %rdi holds two_words    */
62	/* %rsi holds guest_state  */
63	/* %rdx holds host_addr    */
64
65        /* The preamble */
66
67        /* Save integer registers, since this is a pseudo-function. */
68        pushq   %rax
69	pushq	%rbx
70	pushq	%rcx
71        pushq   %rdx
72	pushq	%rsi
73	pushq	%rbp
74	pushq	%r8
75	pushq	%r9
76	pushq	%r10
77	pushq	%r11
78	pushq	%r12
79	pushq	%r13
80	pushq	%r14
81	pushq	%r15
82        /* %rdi must be saved last */
83	pushq	%rdi
84
85        /* Get the host CPU in the state expected by generated code. */
86
87	/* set host FPU control word to the default mode expected
88           by VEX-generated code.  See comments in libvex.h for
89           more info. */
90	finit
91	pushq	$0x027F
92	fldcw	(%rsp)
93	addq	$8, %rsp
94
95	/* set host SSE control word to the default mode expected
96	   by VEX-generated code. */
97	pushq	$0x1F80
98	ldmxcsr	(%rsp)
99	addq	$8, %rsp
100
101	/* set dir flag to known value */
102	cld
103
104	/* Set up the guest state pointer */
105	movq	%rsi, %rbp
106
107        /* and jump into the code cache.  Chained translations in
108           the code cache run, until for whatever reason, they can't
109           continue.  When that happens, the translation in question
110           will jump (or call) to one of the continuation points
111           VG_(cp_...) below. */
112        jmpq    *%rdx
113       	/*NOTREACHED*/
114
115/*----------------------------------------------------*/
116/*--- Postamble and exit.                          ---*/
117/*----------------------------------------------------*/
118
119postamble:
120        /* At this point, %rax and %rdx contain two
121           words to be returned to the caller.  %rax
122           holds a TRC value, and %rdx optionally may
123           hold another word (for CHAIN_ME exits, the
124           address of the place to patch.) */
125
126	/* We're leaving.  Check that nobody messed with %mxcsr
127           or %fpucw.  We can't mess with %rax or %rdx here as they
128           hold the tentative return values, but any others are OK. */
129#if !defined(ENABLE_INNER)
130        /* This check fails for self-hosting, so skip in that case */
131	pushq	$0
132	fstcw	(%rsp)
133	cmpl	$0x027F, (%rsp)
134	popq	%r15 /* get rid of the word without trashing %rflags */
135	jnz	invariant_violation
136#endif
137	pushq	$0
138	stmxcsr	(%rsp)
139	andl	$0xFFFFFFC0, (%rsp)  /* mask out status flags */
140	cmpl	$0x1F80, (%rsp)
141	popq	%r15
142	jnz	invariant_violation
143	/* otherwise we're OK */
144	jmp	remove_frame
145invariant_violation:
146	movq	$VG_TRC_INVARIANT_FAILED, %rax
147        movq    $0, %rdx
148
149remove_frame:
150        /* Pop %rdi, stash return values */
151	popq	%rdi
152        movq    %rax, 0(%rdi)
153        movq    %rdx, 8(%rdi)
154        /* Now pop everything else */
155	popq	%r15
156	popq	%r14
157	popq	%r13
158	popq	%r12
159	popq	%r11
160	popq	%r10
161	popq	%r9
162	popq	%r8
163	popq	%rbp
164	popq	%rsi
165	popq	%rdx
166	popq	%rcx
167	popq	%rbx
168	popq	%rax
169	ret
170
171/*----------------------------------------------------*/
172/*--- Continuation points                          ---*/
173/*----------------------------------------------------*/
174
175/* ------ Chain me to slow entry point ------ */
176.globl VG_(disp_cp_chain_me_to_slowEP)
177VG_(disp_cp_chain_me_to_slowEP):
178        /* We got called.  The return address indicates
179           where the patching needs to happen.  Collect
180           the return address and, exit back to C land,
181           handing the caller the pair (Chain_me_S, RA) */
182        movq    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
183        popq    %rdx
184        /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
185           3  = call *%r11 */
186        subq    $10+3, %rdx
187        jmp     postamble
188
189/* ------ Chain me to fast entry point ------ */
190.globl VG_(disp_cp_chain_me_to_fastEP)
191VG_(disp_cp_chain_me_to_fastEP):
192        /* We got called.  The return address indicates
193           where the patching needs to happen.  Collect
194           the return address and, exit back to C land,
195           handing the caller the pair (Chain_me_F, RA) */
196        movq    $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
197        popq    %rdx
198        /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
199           3  = call *%r11 */
200        subq    $10+3, %rdx
201        jmp     postamble
202
203/* ------ Indirect but boring jump ------ */
204.globl VG_(disp_cp_xindir)
205VG_(disp_cp_xindir):
206	/* Where are we going? */
207	movq	OFFSET_amd64_RIP(%rbp), %rax
208
209        /* stats only */
210        movabsq $VG_(stats__n_xindirs_32), %r10
211        addl    $1, (%r10)
212
213	/* try a fast lookup in the translation cache */
214	movabsq $VG_(tt_fast), %rcx
215	movq	%rax, %rbx		/* next guest addr */
216	andq	$VG_TT_FAST_MASK, %rbx	/* entry# */
217	shlq	$4, %rbx		/* entry# * sizeof(FastCacheEntry) */
218	movq	0(%rcx,%rbx,1), %r10	/* .guest */
219	movq	8(%rcx,%rbx,1), %r11	/* .host */
220	cmpq	%rax, %r10
221	jnz	fast_lookup_failed
222
223        /* Found a match.  Jump to .host. */
224	jmp 	*%r11
225	ud2	/* persuade insn decoders not to speculate past here */
226
227fast_lookup_failed:
228        /* stats only */
229        movabsq $VG_(stats__n_xindir_misses_32), %r10
230        addl    $1, (%r10)
231
232	movq	$VG_TRC_INNER_FASTMISS, %rax
233        movq    $0, %rdx
234	jmp	postamble
235
236/* ------ Assisted jump ------ */
237.globl VG_(disp_cp_xassisted)
238VG_(disp_cp_xassisted):
239        /* %rbp contains the TRC */
240        movq    %rbp, %rax
241        movq    $0, %rdx
242        jmp     postamble
243
244/* ------ Event check failed ------ */
245.globl VG_(disp_cp_evcheck_fail)
246VG_(disp_cp_evcheck_fail):
247       	movq	$VG_TRC_INNER_COUNTERZERO, %rax
248        movq    $0, %rdx
249	jmp	postamble
250

#endif // defined(VGP_amd64_darwin)

/* Let the linker know we don't need an executable stack */
MARK_STACK_NO_EXEC

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/