/*--------------------------------------------------------------------*/
/*--- The core dispatch loop, for jumping to a code address.       ---*/
/*---                                     dispatch-amd64-solaris.S ---*/
/*--------------------------------------------------------------------*/

/*
  This file is part of Valgrind, a dynamic binary instrumentation
  framework.

  Copyright (C) 2000-2017 Julian Seward
     jseward@acm.org

  This program is free software; you can redistribute it and/or
  modify it under the terms of the GNU General Public License as
  published by the Free Software Foundation; either version 2 of the
  License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful, but
  WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
  02111-1307, USA.

  The GNU General Public License is contained in the file COPYING.
*/

#include "pub_core_basics_asm.h"

#if defined(VGP_amd64_solaris)

#include "pub_core_dispatch_asm.h"
#include "pub_core_transtab_asm.h"
#include "libvex_guest_offsets.h"        /* for OFFSET_amd64_RIP */


/*------------------------------------------------------------*/
/*---                                                      ---*/
/*--- The dispatch loop.  VG_(disp_run_translations) is    ---*/
/*--- used to run all translations,                        ---*/
/*--- including no-redir ones.                             ---*/
/*---                                                      ---*/
/*------------------------------------------------------------*/

/*----------------------------------------------------*/
/*--- Entry and preamble (set everything up)       ---*/
/*----------------------------------------------------*/

/* signature:
void VG_(disp_run_translations)( UWord* two_words,
                                 void*  guest_state,
                                 Addr   host_addr );
*/
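/* On return, two_words[0] receives the translation return code (TRC)
   and two_words[1] an optional second word, e.g. the patch address
   for chain-me exits; see the postamble below. */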
.text
.globl VG_(disp_run_translations)
.type  VG_(disp_run_translations), @function
VG_(disp_run_translations):
        /* %rdi holds two_words    */
        /* %rsi holds guest_state  */
        /* %rdx holds host_addr    */

        /* The preamble */

        /* Save integer registers, since this is a pseudo-function. */
        pushq   %rax
        pushq   %rbx
        pushq   %rcx
        pushq   %rdx
        pushq   %rsi
        pushq   %rbp
        pushq   %r8
        pushq   %r9
        pushq   %r10
        pushq   %r11
        pushq   %r12
        pushq   %r13
        pushq   %r14
        pushq   %r15
        /* %rdi must be saved last */
        pushq   %rdi

        /* Get the host CPU in the state expected by generated code. */

        /* set host FPU control word to the default mode expected
           by VEX-generated code.  See comments in libvex.h for
           more info. */
        finit
        pushq   $0x027F
        fldcw   (%rsp)
        addq    $8, %rsp
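        /* 0x027F: round to nearest, 53-bit ("double") precision,
           all x87 exceptions masked. */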

        /* set host SSE control word to the default mode expected
           by VEX-generated code. */
        pushq   $0x1F80
        ldmxcsr (%rsp)
        addq    $8, %rsp
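        /* 0x1F80: all SSE exceptions masked, round to nearest,
           FTZ/DAZ off -- the architectural reset value of %mxcsr. */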

        /* set dir flag to known value */
        cld

        /* Set up the guest state pointer */
        movq    %rsi, %rbp

        /* and jump into the code cache.  Chained translations in
           the code cache run until, for whatever reason, they can't
           continue.  When that happens, the translation in question
           will jump (or call) to one of the continuation points
           VG_(cp_...) below. */
        jmpq    *%rdx
        /*NOTREACHED*/

/*----------------------------------------------------*/
/*--- Postamble and exit.                          ---*/
/*----------------------------------------------------*/

postamble:
        /* At this point, %rax and %rdx contain two
           words to be returned to the caller.  %rax
           holds a TRC value, and %rdx optionally may
           hold another word (for CHAIN_ME exits, the
           address of the place to patch). */

        /* We're leaving.  Check that nobody messed with %mxcsr
           or %fpucw.  We can't mess with %rax or %rdx here as they
           hold the tentative return values, but any others are OK. */
#if !defined(ENABLE_INNER)
        /* This check fails for self-hosting, so skip in that case */
        pushq   $0
        fstcw   (%rsp)
        cmpl    $0x027F, (%rsp)
        popq    %r15 /* get rid of the word without trashing %rflags */
        jnz     invariant_violation
#endif
        pushq   $0
        stmxcsr (%rsp)
        andl    $0xFFFFFFC0, (%rsp)  /* mask out status flags */
        cmpl    $0x1F80, (%rsp)
        popq    %r15
        jnz     invariant_violation
        /* otherwise we're OK */
        jmp     remove_frame
invariant_violation:
        movq    $VG_TRC_INVARIANT_FAILED, %rax
        movq    $0, %rdx

remove_frame:
        /* Pop %rdi, stash return values */
        popq    %rdi
        movq    %rax, 0(%rdi)
        movq    %rdx, 8(%rdi)
        /* Now pop everything else */
        popq    %r15
        popq    %r14
        popq    %r13
        popq    %r12
        popq    %r11
        popq    %r10
        popq    %r9
        popq    %r8
        popq    %rbp
        popq    %rsi
        popq    %rdx
        popq    %rcx
        popq    %rbx
        popq    %rax
        ret

/*----------------------------------------------------*/
/*--- Continuation points                          ---*/
/*----------------------------------------------------*/

/* ------ Chain me to slow entry point ------ */
.global VG_(disp_cp_chain_me_to_slowEP)
VG_(disp_cp_chain_me_to_slowEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_S, RA). */
        movq    $VG_TRC_CHAIN_ME_TO_SLOW_EP, %rax
        popq    %rdx
        /* 10 = movabsq $VG_(disp_chain_me_to_slowEP), %r11;
           3  = call *%r11 */
        subq    $10+3, %rdx
        jmp     postamble

/* ------ Chain me to fast entry point ------ */
.global VG_(disp_cp_chain_me_to_fastEP)
VG_(disp_cp_chain_me_to_fastEP):
        /* We got called.  The return address indicates
           where the patching needs to happen.  Collect
           the return address and exit back to C land,
           handing the caller the pair (Chain_me_F, RA). */
        movq    $VG_TRC_CHAIN_ME_TO_FAST_EP, %rax
        popq    %rdx
        /* 10 = movabsq $VG_(disp_chain_me_to_fastEP), %r11;
           3  = call *%r11 */
        subq    $10+3, %rdx
        jmp     postamble

/* ------ Indirect but boring jump ------ */
.global VG_(disp_cp_xindir)
VG_(disp_cp_xindir):
        /* Where are we going? */
        movq    OFFSET_amd64_RIP(%rbp), %rax

        /* stats only */
        addl    $1, VG_(stats__n_xindirs_32)

        /* try a fast lookup in the translation cache */
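        /* Each VG_(tt_fast) entry is a 16-byte { guest, host } address
           pair, hence the *16 index scaling and the 0/8 offsets below. */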
        movabsq $VG_(tt_fast), %rcx
        movq    %rax, %rbx              /* next guest addr */
        andq    $VG_TT_FAST_MASK, %rbx  /* entry# */
        shlq    $4, %rbx                /* entry# * sizeof(FastCacheEntry) */
        movq    0(%rcx,%rbx,1), %r10    /* .guest */
        movq    8(%rcx,%rbx,1), %r11    /* .host */
        cmpq    %rax, %r10
        jnz     fast_lookup_failed

        /* Found a match.  Jump to .host. */
        jmp     *%r11
        ud2     /* persuade insn decoders not to speculate past here */

fast_lookup_failed:
        /* stats only */
        addl    $1, VG_(stats__n_xindir_misses_32)

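        /* Hand back INNER_FASTMISS; the C-level scheduler is then
           expected to do a full translation-table lookup (translating
           the target first if necessary) and re-enter the dispatcher. */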
        movq    $VG_TRC_INNER_FASTMISS, %rax
        movq    $0, %rdx
        jmp     postamble

/* ------ Assisted jump ------ */
.global VG_(disp_cp_xassisted)
VG_(disp_cp_xassisted):
        /* %rbp contains the TRC */
        movq    %rbp, %rax
        movq    $0, %rdx
        jmp     postamble

/* ------ Event check failed ------ */
.global VG_(disp_cp_evcheck_fail)
VG_(disp_cp_evcheck_fail):
        movq    $VG_TRC_INNER_COUNTERZERO, %rax
        movq    $0, %rdx
        jmp     postamble


.size VG_(disp_run_translations), .-VG_(disp_run_translations)

#endif // defined(VGP_amd64_solaris)

/* Let the linker know we don't need an executable stack */
MARK_STACK_NO_EXEC

/*--------------------------------------------------------------------*/
/*--- end                                                          ---*/
/*--------------------------------------------------------------------*/