1 /*
2  *    Stack-less Just-In-Time compiler
3  *
4  *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without modification, are
7  * permitted provided that the following conditions are met:
8  *
9  *   1. Redistributions of source code must retain the above copyright notice, this list of
10  *      conditions and the following disclaimer.
11  *
12  *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13  *      of conditions and the following disclaimer in the documentation and/or other materials
14  *      provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19  * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21  * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26 
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
28 {
29 	return "PowerPC" SLJIT_CPUINFO;
30 }
31 
32 /* Length of an instruction word.
33    Both for ppc-32 and ppc-64. */
34 typedef sljit_u32 sljit_ins;
35 
36 #if ((defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32) && (defined _AIX)) \
37 	|| (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
38 #define SLJIT_PPC_STACK_FRAME_V2 1
39 #endif
40 
41 #ifdef _AIX
42 #include <sys/cache.h>
43 #endif
44 
45 #if (defined _CALL_ELF && _CALL_ELF == 2)
46 #define SLJIT_PASS_ENTRY_ADDR_TO_CALL 1
47 #endif
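/* Note (background, not stated in this file): _CALL_ELF == 2 selects the
   ELFv2 ABI, where the callee's entry address is expected in a register at
   the call site; this appears to be why a dedicated TMP_CALL_REG is reserved
   below when this flag is set. */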
48 
49 #if (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL)
50 
51 static void ppc_cache_flush(sljit_ins *from, sljit_ins *to)
52 {
53 #ifdef _AIX
54 	_sync_cache_range((caddr_t)from, (int)((size_t)to - (size_t)from));
55 #elif defined(__GNUC__) || (defined(__IBM_GCC_ASM) && __IBM_GCC_ASM)
56 #	if defined(_ARCH_PWR) || defined(_ARCH_PWR2)
57 	/* Cache flush for POWER architecture. */
58 	while (from < to) {
59 		__asm__ volatile (
60 			"clf 0, %0\n"
61 			"dcs\n"
62 			: : "r"(from)
63 		);
64 		from++;
65 	}
66 	__asm__ volatile ( "ics" );
67 #	elif defined(_ARCH_COM) && !defined(_ARCH_PPC)
68 #	error "Cache flush is not implemented for PowerPC/POWER common mode."
69 #	else
70 	/* Cache flush for PowerPC architecture. */
71 	while (from < to) {
72 		__asm__ volatile (
73 			"dcbf 0, %0\n"
74 			"sync\n"
75 			"icbi 0, %0\n"
76 			: : "r"(from)
77 		);
78 		from++;
79 	}
80 	__asm__ volatile ( "isync" );
81 #	endif
82 #	ifdef __xlc__
83 #	warning "This file may fail to compile if -qfuncsect is used"
84 #	endif
85 #elif defined(__xlc__)
86 #error "Please enable GCC syntax for inline assembly statements with -qasm=gcc"
87 #else
88 #error "This platform requires a cache flush implementation."
89 #endif /* _AIX */
90 }
91 
92 #endif /* (defined SLJIT_CACHE_FLUSH_OWN_IMPL && SLJIT_CACHE_FLUSH_OWN_IMPL) */
93 
94 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
95 #define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
96 #define TMP_ZERO	(SLJIT_NUMBER_OF_REGISTERS + 4)
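/* TMP_ZERO is kept loaded with zero (it is cleared with an ADDI in
   sljit_emit_enter and advertised via SLJIT_HAS_ZERO_REGISTER), so it can be
   used wherever a guaranteed zero source operand is needed. */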
97 
98 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
99 #define TMP_CALL_REG	(SLJIT_NUMBER_OF_REGISTERS + 5)
100 #else
101 #define TMP_CALL_REG	TMP_REG2
102 #endif
103 
104 #define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
105 #define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
106 
107 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
108 	0, 3, 4, 5, 6, 7, 8, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 1, 9, 10, 31, 12
109 };
110 
111 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
112 	0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 31, 30, 29, 28, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 17, 16, 15, 14, 0, 13
113 };
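/* Illustrative reading of the maps (ABI details assumed, not taken from this
   file): reg_map translates SLJIT register indices to hardware GPR numbers,
   e.g. reg_map[1] == 3 means SLJIT_R0 lives in r3, the first argument /
   return-value register, while the entry mapping to r1 is the stack pointer. */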
114 
115 /* --------------------------------------------------------------------- */
116 /*  Instruction forms                                                    */
117 /* --------------------------------------------------------------------- */
118 #define D(d)		((sljit_ins)reg_map[d] << 21)
119 #define S(s)		((sljit_ins)reg_map[s] << 21)
120 #define A(a)		((sljit_ins)reg_map[a] << 16)
121 #define B(b)		((sljit_ins)reg_map[b] << 11)
122 #define C(c)		((sljit_ins)reg_map[c] << 6)
123 #define FD(fd)		((sljit_ins)freg_map[fd] << 21)
124 #define FS(fs)		((sljit_ins)freg_map[fs] << 21)
125 #define FA(fa)		((sljit_ins)freg_map[fa] << 16)
126 #define FB(fb)		((sljit_ins)freg_map[fb] << 11)
127 #define FC(fc)		((sljit_ins)freg_map[fc] << 6)
128 #define IMM(imm)	((sljit_ins)(imm) & 0xffff)
129 #define CRD(d)		((sljit_ins)(d) << 21)
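/* Sketch of how the field macros compose an instruction word: an X/XO-form
   arithmetic op is built as e.g. (ADD | D(dst) | A(src1) | B(src2)), and a
   D-form memory access as (LWZ | D(dst) | A(base) | IMM(offset)); see the
   push_inst() calls throughout this file. */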
130 
131 /* Instruction bit sections.
132    OE and Rc flag (see ALT_SET_FLAGS). */
133 #define OE(flags)	((flags) & ALT_SET_FLAGS)
134 /* Rc flag (see ALT_SET_FLAGS). */
135 #define RC(flags)	(((flags) & ALT_SET_FLAGS) >> 10)
136 #define HI(opcode)	((sljit_ins)(opcode) << 26)
137 #define LO(opcode)	((sljit_ins)(opcode) << 1)
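/* Worked example (for illustration only): ADD below is HI(31) | LO(266),
   i.e. (31 << 26) | (266 << 1) == 0x7c000214, the base encoding of "add";
   OE(flags) can add the OE bit (0x400) and RC(flags) the Rc bit (0x1). */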
138 
139 #define ADD		(HI(31) | LO(266))
140 #define ADDC		(HI(31) | LO(10))
141 #define ADDE		(HI(31) | LO(138))
142 #define ADDI		(HI(14))
143 #define ADDIC		(HI(13))
144 #define ADDIS		(HI(15))
145 #define ADDME		(HI(31) | LO(234))
146 #define AND		(HI(31) | LO(28))
147 #define ANDI		(HI(28))
148 #define ANDIS		(HI(29))
149 #define Bx		(HI(18))
150 #define BCx		(HI(16))
151 #define BCCTR		(HI(19) | LO(528) | (3 << 11))
152 #define BLR		(HI(19) | LO(16) | (0x14 << 21))
153 #define CNTLZD		(HI(31) | LO(58))
154 #define CNTLZW		(HI(31) | LO(26))
155 #define CMP		(HI(31) | LO(0))
156 #define CMPI		(HI(11))
157 #define CMPL		(HI(31) | LO(32))
158 #define CMPLI		(HI(10))
159 #define CROR		(HI(19) | LO(449))
160 #define DCBT		(HI(31) | LO(278))
161 #define DIVD		(HI(31) | LO(489))
162 #define DIVDU		(HI(31) | LO(457))
163 #define DIVW		(HI(31) | LO(491))
164 #define DIVWU		(HI(31) | LO(459))
165 #define EXTSB		(HI(31) | LO(954))
166 #define EXTSH		(HI(31) | LO(922))
167 #define EXTSW		(HI(31) | LO(986))
168 #define FABS		(HI(63) | LO(264))
169 #define FADD		(HI(63) | LO(21))
170 #define FADDS		(HI(59) | LO(21))
171 #define FCFID		(HI(63) | LO(846))
172 #define FCMPU		(HI(63) | LO(0))
173 #define FCTIDZ		(HI(63) | LO(815))
174 #define FCTIWZ		(HI(63) | LO(15))
175 #define FDIV		(HI(63) | LO(18))
176 #define FDIVS		(HI(59) | LO(18))
177 #define FMR		(HI(63) | LO(72))
178 #define FMUL		(HI(63) | LO(25))
179 #define FMULS		(HI(59) | LO(25))
180 #define FNEG		(HI(63) | LO(40))
181 #define FRSP		(HI(63) | LO(12))
182 #define FSUB		(HI(63) | LO(20))
183 #define FSUBS		(HI(59) | LO(20))
184 #define LD		(HI(58) | 0)
185 #define LFD		(HI(50))
186 #define LWZ		(HI(32))
187 #define MFCR		(HI(31) | LO(19))
188 #define MFLR		(HI(31) | LO(339) | 0x80000)
189 #define MFXER		(HI(31) | LO(339) | 0x10000)
190 #define MTCTR		(HI(31) | LO(467) | 0x90000)
191 #define MTLR		(HI(31) | LO(467) | 0x80000)
192 #define MTXER		(HI(31) | LO(467) | 0x10000)
193 #define MULHD		(HI(31) | LO(73))
194 #define MULHDU		(HI(31) | LO(9))
195 #define MULHW		(HI(31) | LO(75))
196 #define MULHWU		(HI(31) | LO(11))
197 #define MULLD		(HI(31) | LO(233))
198 #define MULLI		(HI(7))
199 #define MULLW		(HI(31) | LO(235))
200 #define NEG		(HI(31) | LO(104))
201 #define NOP		(HI(24))
202 #define NOR		(HI(31) | LO(124))
203 #define OR		(HI(31) | LO(444))
204 #define ORI		(HI(24))
205 #define ORIS		(HI(25))
206 #define RLDCL		(HI(30) | LO(8))
207 #define RLDICL		(HI(30) | LO(0 << 1))
208 #define RLDICR		(HI(30) | LO(1 << 1))
209 #define RLDIMI		(HI(30) | LO(3 << 1))
210 #define RLWIMI		(HI(20))
211 #define RLWINM		(HI(21))
212 #define RLWNM		(HI(23))
213 #define SLD		(HI(31) | LO(27))
214 #define SLW		(HI(31) | LO(24))
215 #define SRAD		(HI(31) | LO(794))
216 #define SRADI		(HI(31) | LO(413 << 1))
217 #define SRAW		(HI(31) | LO(792))
218 #define SRAWI		(HI(31) | LO(824))
219 #define SRD		(HI(31) | LO(539))
220 #define SRW		(HI(31) | LO(536))
221 #define STD		(HI(62) | 0)
222 #define STDU		(HI(62) | 1)
223 #define STDUX		(HI(31) | LO(181))
224 #define STFD		(HI(54))
225 #define STFIWX		(HI(31) | LO(983))
226 #define STW		(HI(36))
227 #define STWU		(HI(37))
228 #define STWUX		(HI(31) | LO(183))
229 #define SUBF		(HI(31) | LO(40))
230 #define SUBFC		(HI(31) | LO(8))
231 #define SUBFE		(HI(31) | LO(136))
232 #define SUBFIC		(HI(8))
233 #define XOR		(HI(31) | LO(316))
234 #define XORI		(HI(26))
235 #define XORIS		(HI(27))
236 
237 #define SIMM_MAX	(0x7fff)
238 #define SIMM_MIN	(-0x8000)
239 #define UIMM_MAX	(0xffff)
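/* D-form instructions carry only a 16-bit immediate, so constants outside
   the SIMM/UIMM ranges above have to be assembled in pieces (e.g. an
   ADDIS/ORIS for the upper half followed by an ADDI/ORI for the lower half)
   or loaded into a temporary register via load_immediate(). */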
240 
241 /* Shift helpers. */
242 #define RLWI_SH(sh) ((sljit_ins)(sh) << 11)
243 #define RLWI_MBE(mb, me) (((sljit_ins)(mb) << 6) | ((sljit_ins)(me) << 1))
244 #define RLDI_SH(sh) ((((sljit_ins)(sh) & 0x1f) << 11) | (((sljit_ins)(sh) & 0x20) >> 4))
245 #define RLDI_MB(mb) ((((sljit_ins)(mb) & 0x1f) << 6) | ((sljit_ins)(mb) & 0x20))
246 #define RLDI_ME(me) RLDI_MB(me)
247 
248 #define SLWI(shift) (RLWINM | RLWI_SH(shift) | RLWI_MBE(0, 31 - (shift)))
249 #define SLDI(shift) (RLDICR | RLDI_SH(shift) | RLDI_ME(63 - (shift)))
250 /* shift > 0 */
251 #define SRWI(shift) (RLWINM | RLWI_SH(32 - (shift)) | RLWI_MBE((shift), 31))
252 #define SRDI(shift) (RLDICL | RLDI_SH(64 - (shift)) | RLDI_MB(shift))
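/* Example expansion (illustrative): SLWI(3) produces an RLWINM encoding with
   SH = 3 and mask MB = 0 .. ME = 28, i.e. "rlwinm rA, rS, 3, 0, 28", the
   canonical shift-left idiom; SRWI(n) instead rotates by (32 - n) and masks
   off the top n bits. */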
253 
254 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
255 #define SLWI_W(shift) SLWI(shift)
256 #else /* !SLJIT_CONFIG_PPC_32 */
257 #define SLWI_W(shift) SLDI(shift)
258 #endif /* SLJIT_CONFIG_PPC_32 */
259 
260 #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
261 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_function_context(void** func_ptr, struct sljit_function_context* context, sljit_uw addr, void* func)
262 {
263 	sljit_uw* ptrs;
264 
265 	if (func_ptr)
266 		*func_ptr = (void*)context;
267 
268 	ptrs = (sljit_uw*)func;
269 	context->addr = addr ? addr : ptrs[0];
270 	context->r2 = ptrs[1];
271 	context->r11 = ptrs[2];
272 }
273 #endif
274 
275 static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
276 {
277 	sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
278 	FAIL_IF(!ptr);
279 	*ptr = ins;
280 	compiler->size++;
281 	return SLJIT_SUCCESS;
282 }
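/* Typical usage (as seen below): each call appends exactly one 32-bit
   instruction word, e.g.
   FAIL_IF(push_inst(compiler, OR | S(src) | A(dst) | B(src)));
   which emits the register-move idiom (mr dst, src). */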
283 
284 static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_ins *code, sljit_sw executable_offset)
285 {
286 	sljit_sw diff;
287 	sljit_uw target_addr;
288 	sljit_uw extra_jump_flags;
289 
290 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL) && (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
291 	if (jump->flags & (SLJIT_REWRITABLE_JUMP | IS_CALL))
292 		return 0;
293 #else
294 	if (jump->flags & SLJIT_REWRITABLE_JUMP)
295 		return 0;
296 #endif
297 
298 	if (jump->flags & JUMP_ADDR)
299 		target_addr = jump->u.target;
300 	else {
301 		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
302 		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
303 	}
304 
305 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL) && (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
306 	if (jump->flags & IS_CALL)
307 		goto keep_address;
308 #endif
309 
310 	diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr) - executable_offset) & ~0x3l;
311 
312 	extra_jump_flags = 0;
313 	if (jump->flags & IS_COND) {
314 		if (diff <= 0x7fff && diff >= -0x8000) {
315 			jump->flags |= PATCH_B;
316 			return 1;
317 		}
318 		if (target_addr <= 0xffff) {
319 			jump->flags |= PATCH_B | PATCH_ABS_B;
320 			return 1;
321 		}
322 		extra_jump_flags = REMOVE_COND;
323 
324 		diff -= SSIZE_OF(ins);
325 	}
326 
327 	if (diff <= 0x01ffffff && diff >= -0x02000000) {
328 		jump->flags |= PATCH_B | extra_jump_flags;
329 		return 1;
330 	}
331 
332 	if (target_addr <= 0x03ffffff) {
333 		jump->flags |= PATCH_B | PATCH_ABS_B | extra_jump_flags;
334 		return 1;
335 	}
336 
337 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
338 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
339 keep_address:
340 #endif
341 	if (target_addr <= 0x7fffffff) {
342 		jump->flags |= PATCH_ABS32;
343 		return 1;
344 	}
345 
346 	if (target_addr <= 0x7fffffffffffl) {
347 		jump->flags |= PATCH_ABS48;
348 		return 1;
349 	}
350 #endif
351 
352 	return 0;
353 }
354 
355 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
356 
357 static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
358 {
359 	if (max_label < 0x100000000l) {
360 		put_label->flags = 0;
361 		return 1;
362 	}
363 
364 	if (max_label < 0x1000000000000l) {
365 		put_label->flags = 1;
366 		return 3;
367 	}
368 
369 	put_label->flags = 2;
370 	return 4;
371 }
372 
373 static SLJIT_INLINE void put_label_set(struct sljit_put_label *put_label)
374 {
375 	sljit_uw addr = put_label->label->addr;
376 	sljit_ins *inst = (sljit_ins *)put_label->addr;
377 	sljit_u32 reg = *inst;
378 
379 	if (put_label->flags == 0) {
380 		SLJIT_ASSERT(addr < 0x100000000l);
381 		inst[0] = ORIS | S(TMP_ZERO) | A(reg) | IMM(addr >> 16);
382 	}
383 	else {
384 		if (put_label->flags == 1) {
385 			SLJIT_ASSERT(addr < 0x1000000000000l);
386 			inst[0] = ORI | S(TMP_ZERO) | A(reg) | IMM(addr >> 32);
387 		}
388 		else {
389 			inst[0] = ORIS | S(TMP_ZERO) | A(reg) | IMM(addr >> 48);
390 			inst[1] = ORI | S(reg) | A(reg) | IMM((addr >> 32) & 0xffff);
391 			inst++;
392 		}
393 
394 		inst[1] = SLDI(32) | S(reg) | A(reg);
395 		inst[2] = ORIS | S(reg) | A(reg) | IMM((addr >> 16) & 0xffff);
396 		inst += 2;
397 	}
398 
399 	inst[1] = ORI | S(reg) | A(reg) | IMM(addr & 0xffff);
400 }
401 
402 #endif /* SLJIT_CONFIG_PPC_64 */
403 
404 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
405 {
406 	struct sljit_memory_fragment *buf;
407 	sljit_ins *code;
408 	sljit_ins *code_ptr;
409 	sljit_ins *buf_ptr;
410 	sljit_ins *buf_end;
411 	sljit_uw word_count;
412 	sljit_uw next_addr;
413 	sljit_sw executable_offset;
414 	sljit_uw addr;
415 
416 	struct sljit_label *label;
417 	struct sljit_jump *jump;
418 	struct sljit_const *const_;
419 	struct sljit_put_label *put_label;
420 
421 	CHECK_ERROR_PTR();
422 	CHECK_PTR(check_sljit_generate_code(compiler));
423 	reverse_buf(compiler);
424 
425 #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
426 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
427 	compiler->size += (compiler->size & 0x1) + (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
428 #else
429 	compiler->size += (sizeof(struct sljit_function_context) / sizeof(sljit_ins));
430 #endif
431 #endif
432 	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
433 	PTR_FAIL_WITH_EXEC_IF(code);
434 	buf = compiler->buf;
435 
436 	code_ptr = code;
437 	word_count = 0;
438 	next_addr = 0;
439 	executable_offset = SLJIT_EXEC_OFFSET(code);
440 
441 	label = compiler->labels;
442 	jump = compiler->jumps;
443 	const_ = compiler->consts;
444 	put_label = compiler->put_labels;
445 
446 	do {
447 		buf_ptr = (sljit_ins*)buf->memory;
448 		buf_end = buf_ptr + (buf->used_size >> 2);
449 		do {
450 			*code_ptr = *buf_ptr++;
451 			if (next_addr == word_count) {
452 				SLJIT_ASSERT(!label || label->size >= word_count);
453 				SLJIT_ASSERT(!jump || jump->addr >= word_count);
454 				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
455 				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
456 
457 				/* These structures are ordered by their address. */
458 				if (label && label->size == word_count) {
459 					/* Just recording the address. */
460 					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
461 					label->size = (sljit_uw)(code_ptr - code);
462 					label = label->next;
463 				}
464 				if (jump && jump->addr == word_count) {
465 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
466 					jump->addr = (sljit_uw)(code_ptr - 3);
467 #else
468 					jump->addr = (sljit_uw)(code_ptr - 6);
469 #endif
470 					if (detect_jump_type(jump, code_ptr, code, executable_offset)) {
471 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
472 						code_ptr[-3] = code_ptr[0];
473 						code_ptr -= 3;
474 #else
475 						if (jump->flags & PATCH_ABS32) {
476 							code_ptr -= 3;
477 							code_ptr[-1] = code_ptr[2];
478 							code_ptr[0] = code_ptr[3];
479 						}
480 						else if (jump->flags & PATCH_ABS48) {
481 							code_ptr--;
482 							code_ptr[-1] = code_ptr[0];
483 							code_ptr[0] = code_ptr[1];
484 							/* rldicr rX,rX,32,31 -> rX,rX,16,47 */
485 							SLJIT_ASSERT((code_ptr[-3] & 0xfc00ffff) == 0x780007c6);
486 							code_ptr[-3] ^= 0x8422;
487 							/* oris -> ori */
488 							code_ptr[-2] ^= 0x4000000;
489 						}
490 						else {
491 							code_ptr[-6] = code_ptr[0];
492 							code_ptr -= 6;
493 						}
494 #endif
495 						if (jump->flags & REMOVE_COND) {
496 							code_ptr[0] = BCx | (2 << 2) | ((code_ptr[0] ^ (8 << 21)) & 0x03ff0001);
497 							code_ptr++;
498 							jump->addr += sizeof(sljit_ins);
499 							code_ptr[0] = Bx;
500 							jump->flags -= IS_COND;
501 						}
502 					}
503 					jump = jump->next;
504 				}
505 				if (const_ && const_->addr == word_count) {
506 					const_->addr = (sljit_uw)code_ptr;
507 					const_ = const_->next;
508 				}
509 				if (put_label && put_label->addr == word_count) {
510 					SLJIT_ASSERT(put_label->label);
511 					put_label->addr = (sljit_uw)code_ptr;
512 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
513 					code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
514 					word_count += 4;
515 #endif
516 					put_label = put_label->next;
517 				}
518 				next_addr = compute_next_addr(label, jump, const_, put_label);
519 			}
520 			code_ptr++;
521 			word_count++;
522 		} while (buf_ptr < buf_end);
523 
524 		buf = buf->next;
525 	} while (buf);
526 
527 	if (label && label->size == word_count) {
528 		label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
529 		label->size = (sljit_uw)(code_ptr - code);
530 		label = label->next;
531 	}
532 
533 	SLJIT_ASSERT(!label);
534 	SLJIT_ASSERT(!jump);
535 	SLJIT_ASSERT(!const_);
536 	SLJIT_ASSERT(!put_label);
537 
538 #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
539 	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)(compiler->size - (sizeof(struct sljit_function_context) / sizeof(sljit_ins))));
540 #else
541 	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
542 #endif
543 
544 	jump = compiler->jumps;
545 	while (jump) {
546 		do {
547 			addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
548 			buf_ptr = (sljit_ins *)jump->addr;
549 
550 			if (jump->flags & PATCH_B) {
551 				if (jump->flags & IS_COND) {
552 					if (!(jump->flags & PATCH_ABS_B)) {
553 						addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
554 						SLJIT_ASSERT((sljit_sw)addr <= 0x7fff && (sljit_sw)addr >= -0x8000);
555 						*buf_ptr = BCx | ((sljit_ins)addr & 0xfffc) | ((*buf_ptr) & 0x03ff0001);
556 					}
557 					else {
558 						SLJIT_ASSERT(addr <= 0xffff);
559 						*buf_ptr = BCx | ((sljit_ins)addr & 0xfffc) | 0x2 | ((*buf_ptr) & 0x03ff0001);
560 					}
561 				}
562 				else {
563 					if (!(jump->flags & PATCH_ABS_B)) {
564 						addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
565 						SLJIT_ASSERT((sljit_sw)addr <= 0x01ffffff && (sljit_sw)addr >= -0x02000000);
566 						*buf_ptr = Bx | ((sljit_ins)addr & 0x03fffffc) | ((*buf_ptr) & 0x1);
567 					}
568 					else {
569 						SLJIT_ASSERT(addr <= 0x03ffffff);
570 						*buf_ptr = Bx | ((sljit_ins)addr & 0x03fffffc) | 0x2 | ((*buf_ptr) & 0x1);
571 					}
572 				}
573 				break;
574 			}
575 
576 			/* Set the fields of immediate loads. */
577 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
578 			SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1]) & 0xffff) == 0);
579 			buf_ptr[0] |= (sljit_ins)(addr >> 16) & 0xffff;
580 			buf_ptr[1] |= (sljit_ins)addr & 0xffff;
581 #else
582 			if (jump->flags & PATCH_ABS32) {
583 				SLJIT_ASSERT(addr <= 0x7fffffff);
584 				SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1]) & 0xffff) == 0);
585 				buf_ptr[0] |= (sljit_ins)(addr >> 16) & 0xffff;
586 				buf_ptr[1] |= (sljit_ins)addr & 0xffff;
587 				break;
588 			}
589 
590 			if (jump->flags & PATCH_ABS48) {
591 				SLJIT_ASSERT(addr <= 0x7fffffffffff);
592 				SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1] | buf_ptr[3]) & 0xffff) == 0);
593 				buf_ptr[0] |= (sljit_ins)(addr >> 32) & 0xffff;
594 				buf_ptr[1] |= (sljit_ins)(addr >> 16) & 0xffff;
595 				buf_ptr[3] |= (sljit_ins)addr & 0xffff;
596 				break;
597 			}
598 
599 			SLJIT_ASSERT(((buf_ptr[0] | buf_ptr[1] | buf_ptr[3] | buf_ptr[4]) & 0xffff) == 0);
600 			buf_ptr[0] |= (sljit_ins)(addr >> 48) & 0xffff;
601 			buf_ptr[1] |= (sljit_ins)(addr >> 32) & 0xffff;
602 			buf_ptr[3] |= (sljit_ins)(addr >> 16) & 0xffff;
603 			buf_ptr[4] |= (sljit_ins)addr & 0xffff;
604 #endif
605 		} while (0);
606 		jump = jump->next;
607 	}
608 
609 	put_label = compiler->put_labels;
610 	while (put_label) {
611 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
612 		addr = put_label->label->addr;
613 		buf_ptr = (sljit_ins *)put_label->addr;
614 
615 		SLJIT_ASSERT((buf_ptr[0] & 0xfc1f0000) == ADDIS && (buf_ptr[1] & 0xfc000000) == ORI);
616 		buf_ptr[0] |= (addr >> 16) & 0xffff;
617 		buf_ptr[1] |= addr & 0xffff;
618 #else
619 		put_label_set(put_label);
620 #endif
621 		put_label = put_label->next;
622 	}
623 
624 	compiler->error = SLJIT_ERR_COMPILED;
625 	compiler->executable_offset = executable_offset;
626 	compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
627 
628 	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
629 
630 #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
631 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
632 	if (((sljit_sw)code_ptr) & 0x4)
633 		code_ptr++;
634 #endif
635 	sljit_set_function_context(NULL, (struct sljit_function_context*)code_ptr, (sljit_uw)code, (void*)sljit_generate_code);
636 #endif
637 
638 	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
639 
640 	SLJIT_CACHE_FLUSH(code, code_ptr);
641 	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
642 
643 #if (defined SLJIT_INDIRECT_CALL && SLJIT_INDIRECT_CALL)
644 	return code_ptr;
645 #else
646 	return code;
647 #endif
648 }
649 
650 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
651 {
652 	switch (feature_type) {
653 	case SLJIT_HAS_FPU:
654 #ifdef SLJIT_IS_FPU_AVAILABLE
655 		return SLJIT_IS_FPU_AVAILABLE;
656 #else
657 		/* Available by default. */
658 		return 1;
659 #endif
660 
661 	/* A saved register is set to a zero value. */
662 	case SLJIT_HAS_ZERO_REGISTER:
663 	case SLJIT_HAS_CLZ:
664 	case SLJIT_HAS_ROT:
665 	case SLJIT_HAS_PREFETCH:
666 		return 1;
667 
668 	case SLJIT_HAS_CTZ:
669 		return 2;
670 
671 	default:
672 		return 0;
673 	}
674 }
675 
676 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
677 {
678 	return (type >= SLJIT_UNORDERED && type <= SLJIT_ORDERED_LESS_EQUAL);
679 }
680 
681 /* --------------------------------------------------------------------- */
682 /*  Entry, exit                                                          */
683 /* --------------------------------------------------------------------- */
684 
685 /* inp_flags: */
686 
687 /* Creates an index into the data_transfer_insts array. */
688 #define LOAD_DATA	0x01
689 #define INDEXED		0x02
690 #define SIGNED_DATA	0x04
691 
692 #define WORD_DATA	0x00
693 #define BYTE_DATA	0x08
694 #define HALF_DATA	0x10
695 #define INT_DATA	0x18
696 /* Separates integer and floating point registers */
697 #define GPR_REG		0x1f
698 #define DOUBLE_DATA	0x20
699 
700 #define MEM_MASK	0x7f
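/* The bits above form a direct index into data_transfer_insts[] below, e.g.
   (BYTE_DATA | SIGNED_DATA | LOAD_DATA) == 0x0d selects the "b s i l" (lbz)
   entry, and or-ing in INDEXED switches to the corresponding x-form opcode. */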
701 
702 /* Other inp_flags. */
703 
704 /* Integer operation and set flags -> requires exts on 64 bit systems. */
705 #define ALT_SIGN_EXT	0x000100
706 /* This flag affects the RC() and OERC() macros. */
707 #define ALT_SET_FLAGS	0x000400
708 #define ALT_FORM1	0x001000
709 #define ALT_FORM2	0x002000
710 #define ALT_FORM3	0x004000
711 #define ALT_FORM4	0x008000
712 #define ALT_FORM5	0x010000
713 
714 /* Source and destination is register. */
715 #define REG_DEST	0x000001
716 #define REG1_SOURCE	0x000002
717 #define REG2_SOURCE	0x000004
718 /*
719 ALT_SIGN_EXT		0x000100
720 ALT_SET_FLAGS		0x000400
721 ALT_FORM1		0x001000
722 ...
723 ALT_FORM5		0x010000 */
724 
725 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
726 #include "sljitNativePPC_32.c"
727 #else
728 #include "sljitNativePPC_64.c"
729 #endif
730 
731 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
732 #define STACK_STORE	STW
733 #define STACK_LOAD	LWZ
734 #else
735 #define STACK_STORE	STD
736 #define STACK_LOAD	LD
737 #endif
738 
739 #if (defined SLJIT_PPC_STACK_FRAME_V2 && SLJIT_PPC_STACK_FRAME_V2)
740 #define LR_SAVE_OFFSET		2 * SSIZE_OF(sw)
741 #else
742 #define LR_SAVE_OFFSET		SSIZE_OF(sw)
743 #endif
744 
745 #define STACK_MAX_DISTANCE	(0x8000 - SSIZE_OF(sw) - LR_SAVE_OFFSET)
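/* The STWU/STDU form used in sljit_emit_enter takes a 16-bit signed
   displacement, so STACK_MAX_DISTANCE appears to bound the frame size that
   can be allocated in a single instruction; larger frames fall back to
   STWUX/STDUX with the negated size loaded into TMP_REG2. */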
746 
747 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg,
748 	sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg);
749 
750 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
751 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
752 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
753 {
754 	sljit_s32 i, tmp, base, offset;
755 	sljit_s32 word_arg_count = 0;
756 	sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
757 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
758 	sljit_s32 arg_count = 0;
759 #endif
760 
761 	CHECK_ERROR();
762 	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
763 	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
764 
765 	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 0)
766 		+ GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
767 
768 	if (!(options & SLJIT_ENTER_REG_ARG))
769 		local_size += SSIZE_OF(sw);
770 
771 	local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
772 	compiler->local_size = local_size;
773 
774 	FAIL_IF(push_inst(compiler, MFLR | D(0)));
775 
776 	base = SLJIT_SP;
777 	offset = local_size;
778 
779 	if (local_size <= STACK_MAX_DISTANCE) {
780 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
781 		FAIL_IF(push_inst(compiler, STWU | S(SLJIT_SP) | A(SLJIT_SP) | IMM(-local_size)));
782 #else
783 		FAIL_IF(push_inst(compiler, STDU | S(SLJIT_SP) | A(SLJIT_SP) | IMM(-local_size)));
784 #endif
785 	} else {
786 		base = TMP_REG1;
787 		FAIL_IF(push_inst(compiler, OR | S(SLJIT_SP) | A(TMP_REG1) | B(SLJIT_SP)));
788 		FAIL_IF(load_immediate(compiler, TMP_REG2, -local_size));
789 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
790 		FAIL_IF(push_inst(compiler, STWUX | S(SLJIT_SP) | A(SLJIT_SP) | B(TMP_REG2)));
791 #else
792 		FAIL_IF(push_inst(compiler, STDUX | S(SLJIT_SP) | A(SLJIT_SP) | B(TMP_REG2)));
793 #endif
794 		local_size = 0;
795 		offset = 0;
796 	}
797 
798 	tmp = SLJIT_FS0 - fsaveds;
799 	for (i = SLJIT_FS0; i > tmp; i--) {
800 		offset -= SSIZE_OF(f64);
801 		FAIL_IF(push_inst(compiler, STFD | FS(i) | A(base) | IMM(offset)));
802 	}
803 
804 	for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
805 		offset -= SSIZE_OF(f64);
806 		FAIL_IF(push_inst(compiler, STFD | FS(i) | A(base) | IMM(offset)));
807 	}
808 
809 	if (!(options & SLJIT_ENTER_REG_ARG)) {
810 		offset -= SSIZE_OF(sw);
811 		FAIL_IF(push_inst(compiler, STACK_STORE | S(TMP_ZERO) | A(base) | IMM(offset)));
812 	}
813 
814 	tmp = SLJIT_S0 - saveds;
815 	for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
816 		offset -= SSIZE_OF(sw);
817 		FAIL_IF(push_inst(compiler, STACK_STORE | S(i) | A(base) | IMM(offset)));
818 	}
819 
820 	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
821 		offset -= SSIZE_OF(sw);
822 		FAIL_IF(push_inst(compiler, STACK_STORE | S(i) | A(base) | IMM(offset)));
823 	}
824 
825 	FAIL_IF(push_inst(compiler, STACK_STORE | S(0) | A(base) | IMM(local_size + LR_SAVE_OFFSET)));
826 
827 	if (options & SLJIT_ENTER_REG_ARG)
828 		return SLJIT_SUCCESS;
829 
830 	FAIL_IF(push_inst(compiler, ADDI | D(TMP_ZERO) | A(0) | 0));
831 
832 	arg_types >>= SLJIT_ARG_SHIFT;
833 	saved_arg_count = 0;
834 
835 	while (arg_types > 0) {
836 		if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
837 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
838 			do {
839 				if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
840 					tmp = SLJIT_S0 - saved_arg_count;
841 					saved_arg_count++;
842 				} else if (arg_count != word_arg_count)
843 					tmp = SLJIT_R0 + word_arg_count;
844 				else
845 					break;
846 
847 				FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0 + arg_count) | A(tmp) | B(SLJIT_R0 + arg_count)));
848 			} while (0);
849 #else
850 			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
851 				FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0 + word_arg_count) | A(SLJIT_S0 - saved_arg_count) | B(SLJIT_R0 + word_arg_count)));
852 				saved_arg_count++;
853 			}
854 #endif
855 			word_arg_count++;
856 		}
857 
858 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
859 		arg_count++;
860 #endif
861 		arg_types >>= SLJIT_ARG_SHIFT;
862 	}
863 
864 	return SLJIT_SUCCESS;
865 }
866 
867 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
868 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
869 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
870 {
871 	CHECK_ERROR();
872 	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
873 	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
874 
875 	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 0)
876 		+ GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
877 
878 	if (!(options & SLJIT_ENTER_REG_ARG))
879 		local_size += SSIZE_OF(sw);
880 
881 	compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
882 	return SLJIT_SUCCESS;
883 }
884 
885 static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 is_return_to)
886 {
887 	sljit_s32 i, tmp, base, offset;
888 	sljit_s32 local_size = compiler->local_size;
889 
890 	base = SLJIT_SP;
891 	if (local_size > STACK_MAX_DISTANCE) {
892 		base = TMP_REG1;
893 		if (local_size > 2 * STACK_MAX_DISTANCE + LR_SAVE_OFFSET) {
894 			FAIL_IF(push_inst(compiler, STACK_LOAD | D(base) | A(SLJIT_SP) | IMM(0)));
895 			local_size = 0;
896 		} else {
897 			FAIL_IF(push_inst(compiler, ADDI | D(TMP_REG1) | A(SLJIT_SP) | IMM(local_size - STACK_MAX_DISTANCE)));
898 			local_size = STACK_MAX_DISTANCE;
899 		}
900 	}
901 
902 	offset = local_size;
903 	if (!is_return_to)
904 		FAIL_IF(push_inst(compiler, STACK_LOAD | S(0) | A(base) | IMM(offset + LR_SAVE_OFFSET)));
905 
906 	tmp = SLJIT_FS0 - compiler->fsaveds;
907 	for (i = SLJIT_FS0; i > tmp; i--) {
908 		offset -= SSIZE_OF(f64);
909 		FAIL_IF(push_inst(compiler, LFD | FS(i) | A(base) | IMM(offset)));
910 	}
911 
912 	for (i = compiler->fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
913 		offset -= SSIZE_OF(f64);
914 		FAIL_IF(push_inst(compiler, LFD | FS(i) | A(base) | IMM(offset)));
915 	}
916 
917 	if (!(compiler->options & SLJIT_ENTER_REG_ARG)) {
918 		offset -= SSIZE_OF(sw);
919 		FAIL_IF(push_inst(compiler, STACK_LOAD | S(TMP_ZERO) | A(base) | IMM(offset)));
920 	}
921 
922 	tmp = SLJIT_S0 - compiler->saveds;
923 	for (i = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options); i > tmp; i--) {
924 		offset -= SSIZE_OF(sw);
925 		FAIL_IF(push_inst(compiler, STACK_LOAD | S(i) | A(base) | IMM(offset)));
926 	}
927 
928 	for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
929 		offset -= SSIZE_OF(sw);
930 		FAIL_IF(push_inst(compiler, STACK_LOAD | S(i) | A(base) | IMM(offset)));
931 	}
932 
933 	if (!is_return_to)
934 		push_inst(compiler, MTLR | S(0));
935 
936 	if (local_size > 0)
937 		return push_inst(compiler, ADDI | D(SLJIT_SP) | A(base) | IMM(local_size));
938 
939 	SLJIT_ASSERT(base == TMP_REG1);
940 	return push_inst(compiler, OR | S(base) | A(SLJIT_SP) | B(base));
941 }
942 
943 #undef STACK_STORE
944 #undef STACK_LOAD
945 
946 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
947 {
948 	CHECK_ERROR();
949 	CHECK(check_sljit_emit_return_void(compiler));
950 
951 	FAIL_IF(emit_stack_frame_release(compiler, 0));
952 	return push_inst(compiler, BLR);
953 }
954 
955 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
956 	sljit_s32 src, sljit_sw srcw)
957 {
958 	CHECK_ERROR();
959 	CHECK(check_sljit_emit_return_to(compiler, src, srcw));
960 
961 	if (src & SLJIT_MEM) {
962 		ADJUST_LOCAL_OFFSET(src, srcw);
963 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_CALL_REG, src, srcw, TMP_CALL_REG));
964 		src = TMP_CALL_REG;
965 		srcw = 0;
966 	} else if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
967 		FAIL_IF(push_inst(compiler, OR | S(src) | A(TMP_CALL_REG) | B(src)));
968 		src = TMP_CALL_REG;
969 		srcw = 0;
970 	}
971 
972 	FAIL_IF(emit_stack_frame_release(compiler, 1));
973 
974 	SLJIT_SKIP_CHECKS(compiler);
975 	return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);
976 }
977 
978 /* --------------------------------------------------------------------- */
979 /*  Operators                                                            */
980 /* --------------------------------------------------------------------- */
981 
982 /* s/l - store/load (1 bit)
983    i/x - immediate/indexed form
984    u/s - unsigned/signed (1 bit)
985    w/b/h/i - word/byte/half/int allowed (2 bit)
986 
987    Some opcodes are repeated (e.g. store signed / unsigned byte is the same instruction). */
988 
989 /* 64 bit only: [reg+imm] must be aligned to 4 bytes. */
990 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
991 #define INT_ALIGNED	0x10000
992 #endif
993 
994 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
995 #define ARCH_32_64(a, b)	a
996 #define INST_CODE_AND_DST(inst, flags, reg) \
997 	((sljit_ins)(inst) | (sljit_ins)(((flags) & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg)))
998 #else
999 #define ARCH_32_64(a, b)	b
1000 #define INST_CODE_AND_DST(inst, flags, reg) \
1001 	(((sljit_ins)(inst) & ~(sljit_ins)INT_ALIGNED) | (sljit_ins)(((flags) & MEM_MASK) <= GPR_REG ? D(reg) : FD(reg)))
1002 #endif
1003 
1004 static const sljit_ins data_transfer_insts[64 + 16] = {
1005 
1006 /* -------- Integer -------- */
1007 
1008 /* Word. */
1009 
1010 /* w u i s */ ARCH_32_64(HI(36) /* stw */, HI(62) | INT_ALIGNED | 0x0 /* std */),
1011 /* w u i l */ ARCH_32_64(HI(32) /* lwz */, HI(58) | INT_ALIGNED | 0x0 /* ld */),
1012 /* w u x s */ ARCH_32_64(HI(31) | LO(151) /* stwx */, HI(31) | LO(149) /* stdx */),
1013 /* w u x l */ ARCH_32_64(HI(31) | LO(23) /* lwzx */, HI(31) | LO(21) /* ldx */),
1014 
1015 /* w s i s */ ARCH_32_64(HI(36) /* stw */, HI(62) | INT_ALIGNED | 0x0 /* std */),
1016 /* w s i l */ ARCH_32_64(HI(32) /* lwz */, HI(58) | INT_ALIGNED | 0x0 /* ld */),
1017 /* w s x s */ ARCH_32_64(HI(31) | LO(151) /* stwx */, HI(31) | LO(149) /* stdx */),
1018 /* w s x l */ ARCH_32_64(HI(31) | LO(23) /* lwzx */, HI(31) | LO(21) /* ldx */),
1019 
1020 /* Byte. */
1021 
1022 /* b u i s */ HI(38) /* stb */,
1023 /* b u i l */ HI(34) /* lbz */,
1024 /* b u x s */ HI(31) | LO(215) /* stbx */,
1025 /* b u x l */ HI(31) | LO(87) /* lbzx */,
1026 
1027 /* b s i s */ HI(38) /* stb */,
1028 /* b s i l */ HI(34) /* lbz */ /* EXTS_REQ */,
1029 /* b s x s */ HI(31) | LO(215) /* stbx */,
1030 /* b s x l */ HI(31) | LO(87) /* lbzx */ /* EXTS_REQ */,
1031 
1032 /* Half. */
1033 
1034 /* h u i s */ HI(44) /* sth */,
1035 /* h u i l */ HI(40) /* lhz */,
1036 /* h u x s */ HI(31) | LO(407) /* sthx */,
1037 /* h u x l */ HI(31) | LO(279) /* lhzx */,
1038 
1039 /* h s i s */ HI(44) /* sth */,
1040 /* h s i l */ HI(42) /* lha */,
1041 /* h s x s */ HI(31) | LO(407) /* sthx */,
1042 /* h s x l */ HI(31) | LO(343) /* lhax */,
1043 
1044 /* Int. */
1045 
1046 /* i u i s */ HI(36) /* stw */,
1047 /* i u i l */ HI(32) /* lwz */,
1048 /* i u x s */ HI(31) | LO(151) /* stwx */,
1049 /* i u x l */ HI(31) | LO(23) /* lwzx */,
1050 
1051 /* i s i s */ HI(36) /* stw */,
1052 /* i s i l */ ARCH_32_64(HI(32) /* lwz */, HI(58) | INT_ALIGNED | 0x2 /* lwa */),
1053 /* i s x s */ HI(31) | LO(151) /* stwx */,
1054 /* i s x l */ ARCH_32_64(HI(31) | LO(23) /* lwzx */, HI(31) | LO(341) /* lwax */),
1055 
1056 /* -------- Floating point -------- */
1057 
1058 /* d   i s */ HI(54) /* stfd */,
1059 /* d   i l */ HI(50) /* lfd */,
1060 /* d   x s */ HI(31) | LO(727) /* stfdx */,
1061 /* d   x l */ HI(31) | LO(599) /* lfdx */,
1062 
1063 /* s   i s */ HI(52) /* stfs */,
1064 /* s   i l */ HI(48) /* lfs */,
1065 /* s   x s */ HI(31) | LO(663) /* stfsx */,
1066 /* s   x l */ HI(31) | LO(535) /* lfsx */,
1067 };
1068 
1069 static const sljit_ins updated_data_transfer_insts[64] = {
1070 
1071 /* -------- Integer -------- */
1072 
1073 /* Word. */
1074 
1075 /* w u i s */ ARCH_32_64(HI(37) /* stwu */, HI(62) | INT_ALIGNED | 0x1 /* stdu */),
1076 /* w u i l */ ARCH_32_64(HI(33) /* lwzu */, HI(58) | INT_ALIGNED | 0x1 /* ldu */),
1077 /* w u x s */ ARCH_32_64(HI(31) | LO(183) /* stwux */, HI(31) | LO(181) /* stdux */),
1078 /* w u x l */ ARCH_32_64(HI(31) | LO(55) /* lwzux */, HI(31) | LO(53) /* ldux */),
1079 
1080 /* w s i s */ ARCH_32_64(HI(37) /* stwu */, HI(62) | INT_ALIGNED | 0x1 /* stdu */),
1081 /* w s i l */ ARCH_32_64(HI(33) /* lwzu */, HI(58) | INT_ALIGNED | 0x1 /* ldu */),
1082 /* w s x s */ ARCH_32_64(HI(31) | LO(183) /* stwux */, HI(31) | LO(181) /* stdux */),
1083 /* w s x l */ ARCH_32_64(HI(31) | LO(55) /* lwzux */, HI(31) | LO(53) /* ldux */),
1084 
1085 /* Byte. */
1086 
1087 /* b u i s */ HI(39) /* stbu */,
1088 /* b u i l */ HI(35) /* lbzu */,
1089 /* b u x s */ HI(31) | LO(247) /* stbux */,
1090 /* b u x l */ HI(31) | LO(119) /* lbzux */,
1091 
1092 /* b s i s */ HI(39) /* stbu */,
1093 /* b s i l */ 0 /* no such instruction */,
1094 /* b s x s */ HI(31) | LO(247) /* stbux */,
1095 /* b s x l */ 0 /* no such instruction */,
1096 
1097 /* Half. */
1098 
1099 /* h u i s */ HI(45) /* sthu */,
1100 /* h u i l */ HI(41) /* lhzu */,
1101 /* h u x s */ HI(31) | LO(439) /* sthux */,
1102 /* h u x l */ HI(31) | LO(311) /* lhzux */,
1103 
1104 /* h s i s */ HI(45) /* sthu */,
1105 /* h s i l */ HI(43) /* lhau */,
1106 /* h s x s */ HI(31) | LO(439) /* sthux */,
1107 /* h s x l */ HI(31) | LO(375) /* lhaux */,
1108 
1109 /* Int. */
1110 
1111 /* i u i s */ HI(37) /* stwu */,
1112 /* i u i l */ HI(33) /* lwzu */,
1113 /* i u x s */ HI(31) | LO(183) /* stwux */,
1114 /* i u x l */ HI(31) | LO(55) /* lwzux */,
1115 
1116 /* i s i s */ HI(37) /* stwu */,
1117 /* i s i l */ ARCH_32_64(HI(33) /* lwzu */, 0 /* no such instruction */),
1118 /* i s x s */ HI(31) | LO(183) /* stwux */,
1119 /* i s x l */ ARCH_32_64(HI(31) | LO(55) /* lwzux */, HI(31) | LO(373) /* lwaux */),
1120 
1121 /* -------- Floating point -------- */
1122 
1123 /* d   i s */ HI(55) /* stfdu */,
1124 /* d   i l */ HI(51) /* lfdu */,
1125 /* d   x s */ HI(31) | LO(759) /* stfdux */,
1126 /* d   x l */ HI(31) | LO(631) /* lfdux */,
1127 
1128 /* s   i s */ HI(53) /* stfsu */,
1129 /* s   i l */ HI(49) /* lfsu */,
1130 /* s   x s */ HI(31) | LO(695) /* stfsux */,
1131 /* s   x l */ HI(31) | LO(567) /* lfsux */,
1132 };
1133 
1134 #undef ARCH_32_64
1135 
1136 /* Simple cases (no caching is required). */
1137 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 inp_flags, sljit_s32 reg,
1138 	sljit_s32 arg, sljit_sw argw, sljit_s32 tmp_reg)
1139 {
1140 	sljit_ins inst;
1141 	sljit_s32 offs_reg;
1142 
1143 	/* Should work when (arg & REG_MASK) == 0. */
1144 	SLJIT_ASSERT(A(0) == 0);
1145 	SLJIT_ASSERT(arg & SLJIT_MEM);
1146 
1147 	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
1148 		argw &= 0x3;
1149 		offs_reg = OFFS_REG(arg);
1150 
1151 		if (argw != 0) {
1152 			FAIL_IF(push_inst(compiler, SLWI_W(argw) | S(OFFS_REG(arg)) | A(tmp_reg)));
1153 			offs_reg = tmp_reg;
1154 		}
1155 
1156 		inst = data_transfer_insts[(inp_flags | INDEXED) & MEM_MASK];
1157 
1158 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1159 		SLJIT_ASSERT(!(inst & INT_ALIGNED));
1160 #endif /* SLJIT_CONFIG_PPC_64 */
1161 
1162 		return push_inst(compiler, INST_CODE_AND_DST(inst, inp_flags, reg) | A(arg & REG_MASK) | B(offs_reg));
1163 	}
1164 
1165 	inst = data_transfer_insts[inp_flags & MEM_MASK];
1166 	arg &= REG_MASK;
1167 
1168 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1169 	if ((inst & INT_ALIGNED) && (argw & 0x3) != 0) {
1170 		FAIL_IF(load_immediate(compiler, tmp_reg, argw));
1171 
1172 		inst = data_transfer_insts[(inp_flags | INDEXED) & MEM_MASK];
1173 		return push_inst(compiler, INST_CODE_AND_DST(inst, inp_flags, reg) | A(arg) | B(tmp_reg));
1174 	}
1175 #endif /* SLJIT_CONFIG_PPC_64 */
1176 
1177 	if (argw <= SIMM_MAX && argw >= SIMM_MIN)
1178 		return push_inst(compiler, INST_CODE_AND_DST(inst, inp_flags, reg) | A(arg) | IMM(argw));
1179 
1180 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1181 	if (argw <= 0x7fff7fffl && argw >= -0x80000000l) {
1182 #endif /* SLJIT_CONFIG_PPC_64 */
1183 		FAIL_IF(push_inst(compiler, ADDIS | D(tmp_reg) | A(arg) | IMM((argw + 0x8000) >> 16)));
1184 		return push_inst(compiler, INST_CODE_AND_DST(inst, inp_flags, reg) | A(tmp_reg) | IMM(argw));
1185 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1186 	}
1187 
1188 	FAIL_IF(load_immediate(compiler, tmp_reg, argw));
1189 
1190 	inst = data_transfer_insts[(inp_flags | INDEXED) & MEM_MASK];
1191 	return push_inst(compiler, INST_CODE_AND_DST(inst, inp_flags, reg) | A(arg) | B(tmp_reg));
1192 #endif /* SLJIT_CONFIG_PPC_64 */
1193 }
1194 
1195 static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 input_flags,
1196 	sljit_s32 dst, sljit_sw dstw,
1197 	sljit_s32 src1, sljit_sw src1w,
1198 	sljit_s32 src2, sljit_sw src2w)
1199 {
1200 	/* arg1 goes to TMP_REG1 or src reg
1201 	   arg2 goes to TMP_REG2, imm or src reg
1202 	   result goes to TMP_REG2, so storing the result can use TMP_REG1. */
1203 	sljit_s32 dst_r = TMP_REG2;
1204 	sljit_s32 src1_r;
1205 	sljit_s32 src2_r;
1206 	sljit_s32 sugg_src2_r = TMP_REG2;
1207 	sljit_s32 flags = input_flags & (ALT_FORM1 | ALT_FORM2 | ALT_FORM3 | ALT_FORM4 | ALT_FORM5 | ALT_SIGN_EXT | ALT_SET_FLAGS);
1208 
1209 	/* Destination check. */
1210 	if (FAST_IS_REG(dst)) {
1211 		dst_r = dst;
1212 		/* REG_DEST is only used by SLJIT_MOV operations, although
1213 		 * it is also set for op2 operations with an unset destination. */
1214 		flags |= REG_DEST;
1215 
1216 		if (op >= SLJIT_MOV && op <= SLJIT_MOV_P)
1217 			sugg_src2_r = dst_r;
1218 	}
1219 
1220 	/* Source 1. */
1221 	if (FAST_IS_REG(src1)) {
1222 		src1_r = src1;
1223 		flags |= REG1_SOURCE;
1224 	}
1225 	else if (src1 & SLJIT_IMM) {
1226 		src1_r = TMP_ZERO;
1227 		if (src1w != 0) {
1228 			FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
1229 			src1_r = TMP_REG1;
1230 		}
1231 	}
1232 	else {
1233 		FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, TMP_REG1, src1, src1w, TMP_REG1));
1234 		src1_r = TMP_REG1;
1235 	}
1236 
1237 	/* Source 2. */
1238 	if (FAST_IS_REG(src2)) {
1239 		src2_r = src2;
1240 		flags |= REG2_SOURCE;
1241 
1242 		if (!(flags & REG_DEST) && op >= SLJIT_MOV && op <= SLJIT_MOV_P)
1243 			dst_r = src2_r;
1244 	}
1245 	else if (src2 & SLJIT_IMM) {
1246 		src2_r = TMP_ZERO;
1247 		if (src2w != 0) {
1248 			FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w));
1249 			src2_r = sugg_src2_r;
1250 		}
1251 	}
1252 	else {
1253 		FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, sugg_src2_r, src2, src2w, TMP_REG2));
1254 		src2_r = sugg_src2_r;
1255 	}
1256 
1257 	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
1258 
1259 	if (!(dst & SLJIT_MEM))
1260 		return SLJIT_SUCCESS;
1261 
1262 	return emit_op_mem(compiler, input_flags, dst_r, dst, dstw, TMP_REG1);
1263 }
1264 
1265 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
1266 {
1267 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1268 	sljit_s32 int_op = op & SLJIT_32;
1269 #endif
1270 
1271 	CHECK_ERROR();
1272 	CHECK(check_sljit_emit_op0(compiler, op));
1273 
1274 	op = GET_OPCODE(op);
1275 	switch (op) {
1276 	case SLJIT_BREAKPOINT:
1277 	case SLJIT_NOP:
1278 		return push_inst(compiler, NOP);
1279 	case SLJIT_LMUL_UW:
1280 	case SLJIT_LMUL_SW:
1281 		FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R0)));
1282 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1283 		FAIL_IF(push_inst(compiler, MULLD | D(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R1)));
1284 		return push_inst(compiler, (op == SLJIT_LMUL_UW ? MULHDU : MULHD) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
1285 #else
1286 		FAIL_IF(push_inst(compiler, MULLW | D(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R1)));
1287 		return push_inst(compiler, (op == SLJIT_LMUL_UW ? MULHWU : MULHW) | D(SLJIT_R1) | A(TMP_REG1) | B(SLJIT_R1));
1288 #endif
1289 	case SLJIT_DIVMOD_UW:
1290 	case SLJIT_DIVMOD_SW:
1291 		FAIL_IF(push_inst(compiler, OR | S(SLJIT_R0) | A(TMP_REG1) | B(SLJIT_R0)));
1292 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1293 		FAIL_IF(push_inst(compiler, (int_op ? (op == SLJIT_DIVMOD_UW ? DIVWU : DIVW) : (op == SLJIT_DIVMOD_UW ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
1294 		FAIL_IF(push_inst(compiler, (int_op ? MULLW : MULLD) | D(SLJIT_R1) | A(SLJIT_R0) | B(SLJIT_R1)));
1295 #else
1296 		FAIL_IF(push_inst(compiler, (op == SLJIT_DIVMOD_UW ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1)));
1297 		FAIL_IF(push_inst(compiler, MULLW | D(SLJIT_R1) | A(SLJIT_R0) | B(SLJIT_R1)));
1298 #endif
1299 		return push_inst(compiler, SUBF | D(SLJIT_R1) | A(SLJIT_R1) | B(TMP_REG1));
1300 	case SLJIT_DIV_UW:
1301 	case SLJIT_DIV_SW:
1302 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1303 		return push_inst(compiler, (int_op ? (op == SLJIT_DIV_UW ? DIVWU : DIVW) : (op == SLJIT_DIV_UW ? DIVDU : DIVD)) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
1304 #else
1305 		return push_inst(compiler, (op == SLJIT_DIV_UW ? DIVWU : DIVW) | D(SLJIT_R0) | A(SLJIT_R0) | B(SLJIT_R1));
1306 #endif
1307 	case SLJIT_ENDBR:
1308 	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1309 		return SLJIT_SUCCESS;
1310 	}
1311 
1312 	return SLJIT_SUCCESS;
1313 }
1314 
1315 static sljit_s32 emit_prefetch(struct sljit_compiler *compiler,
1316         sljit_s32 src, sljit_sw srcw)
1317 {
1318 	if (!(src & OFFS_REG_MASK)) {
1319 		if (srcw == 0 && (src & REG_MASK))
1320 			return push_inst(compiler, DCBT | A(0) | B(src & REG_MASK));
1321 
1322 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1323 		/* Works with SLJIT_MEM0() case as well. */
1324 		return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
1325 	}
1326 
1327 	srcw &= 0x3;
1328 
1329 	if (srcw == 0)
1330 		return push_inst(compiler, DCBT | A(src & REG_MASK) | B(OFFS_REG(src)));
1331 
1332 	FAIL_IF(push_inst(compiler, SLWI_W(srcw) | S(OFFS_REG(src)) | A(TMP_REG1)));
1333 	return push_inst(compiler, DCBT | A(src & REG_MASK) | B(TMP_REG1));
1334 }
1335 
1336 #define EMIT_MOV(type, type_flags, type_cast) \
1337 	emit_op(compiler, (src & SLJIT_IMM) ? SLJIT_MOV : type, flags | (type_flags), dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? type_cast srcw : srcw)
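/* Sketch of the intent (a reading of the macro, not an original comment):
   for immediate sources the value is pre-narrowed with the given cast so a
   plain SLJIT_MOV suffices, while register/memory sources keep the typed
   move so the proper load or sign/zero extension form is selected. */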
1338 
1339 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1340 	sljit_s32 dst, sljit_sw dstw,
1341 	sljit_s32 src, sljit_sw srcw)
1342 {
1343 	sljit_s32 flags = HAS_FLAGS(op) ? ALT_SET_FLAGS : 0;
1344 	sljit_s32 op_flags = GET_ALL_FLAGS(op);
1345 
1346 	CHECK_ERROR();
1347 	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1348 	ADJUST_LOCAL_OFFSET(dst, dstw);
1349 	ADJUST_LOCAL_OFFSET(src, srcw);
1350 
1351 	op = GET_OPCODE(op);
1352 
1353 	if (GET_FLAG_TYPE(op_flags) == SLJIT_OVERFLOW)
1354 		FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
1355 
1356 	if (op < SLJIT_NOT && FAST_IS_REG(src) && src == dst) {
1357 		if (!TYPE_CAST_NEEDED(op))
1358 			return SLJIT_SUCCESS;
1359 	}
1360 
1361 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1362 	if (op_flags & SLJIT_32) {
1363 		if (op < SLJIT_NOT) {
1364 			if (src & SLJIT_MEM) {
1365 				if (op == SLJIT_MOV_S32)
1366 					op = SLJIT_MOV_U32;
1367 			}
1368 			else if (src & SLJIT_IMM) {
1369 				if (op == SLJIT_MOV_U32)
1370 					op = SLJIT_MOV_S32;
1371 			}
1372 		}
1373 		else {
1374 			/* Most operations expect sign extended arguments. */
1375 			flags |= INT_DATA | SIGNED_DATA;
1376 			if (HAS_FLAGS(op_flags))
1377 				flags |= ALT_SIGN_EXT;
1378 		}
1379 	}
1380 #endif
1381 
1382 	switch (op) {
1383 	case SLJIT_MOV:
1384 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
1385 	case SLJIT_MOV_U32:
1386 	case SLJIT_MOV_S32:
1387 	case SLJIT_MOV32:
1388 #endif
1389 	case SLJIT_MOV_P:
1390 		return emit_op(compiler, SLJIT_MOV, flags | WORD_DATA, dst, dstw, TMP_REG1, 0, src, srcw);
1391 
1392 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1393 	case SLJIT_MOV_U32:
1394 		return EMIT_MOV(SLJIT_MOV_U32, INT_DATA, (sljit_u32));
1395 
1396 	case SLJIT_MOV_S32:
1397 	case SLJIT_MOV32:
1398 		return EMIT_MOV(SLJIT_MOV_S32, INT_DATA | SIGNED_DATA, (sljit_s32));
1399 #endif
1400 
1401 	case SLJIT_MOV_U8:
1402 		return EMIT_MOV(SLJIT_MOV_U8, BYTE_DATA, (sljit_u8));
1403 
1404 	case SLJIT_MOV_S8:
1405 		return EMIT_MOV(SLJIT_MOV_S8, BYTE_DATA | SIGNED_DATA, (sljit_s8));
1406 
1407 	case SLJIT_MOV_U16:
1408 		return EMIT_MOV(SLJIT_MOV_U16, HALF_DATA, (sljit_u16));
1409 
1410 	case SLJIT_MOV_S16:
1411 		return EMIT_MOV(SLJIT_MOV_S16, HALF_DATA | SIGNED_DATA, (sljit_s16));
1412 
1413 	case SLJIT_NOT:
1414 		return emit_op(compiler, SLJIT_NOT, flags, dst, dstw, TMP_REG1, 0, src, srcw);
1415 
1416 	case SLJIT_CLZ:
1417 	case SLJIT_CTZ:
1418 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1419 		return emit_op(compiler, op, flags | (!(op_flags & SLJIT_32) ? 0 : ALT_FORM1), dst, dstw, TMP_REG1, 0, src, srcw);
1420 #else
1421 		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
1422 #endif
1423 	}
1424 
1425 	return SLJIT_SUCCESS;
1426 }
1427 
1428 #undef EMIT_MOV
1429 
1430 #define TEST_SL_IMM(src, srcw) \
1431 	(((src) & SLJIT_IMM) && (srcw) <= SIMM_MAX && (srcw) >= SIMM_MIN)
1432 
1433 #define TEST_UL_IMM(src, srcw) \
1434 	(((src) & SLJIT_IMM) && !((srcw) & ~0xffff))
1435 
1436 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1437 #define TEST_SH_IMM(src, srcw) \
1438 	(((src) & SLJIT_IMM) && !((srcw) & 0xffff) && (srcw) <= 0x7fffffffl && (srcw) >= -0x80000000l)
1439 #else
1440 #define TEST_SH_IMM(src, srcw) \
1441 	(((src) & SLJIT_IMM) && !((srcw) & 0xffff))
1442 #endif
1443 
1444 #define TEST_UH_IMM(src, srcw) \
1445 	(((src) & SLJIT_IMM) && !((srcw) & ~(sljit_sw)0xffff0000))
1446 
1447 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1448 #define TEST_ADD_IMM(src, srcw) \
1449 	(((src) & SLJIT_IMM) && (srcw) <= 0x7fff7fffl && (srcw) >= -0x80000000l)
1450 #else
1451 #define TEST_ADD_IMM(src, srcw) \
1452 	((src) & SLJIT_IMM)
1453 #endif
1454 
1455 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1456 #define TEST_UI_IMM(src, srcw) \
1457 	(((src) & SLJIT_IMM) && !((srcw) & ~0xffffffff))
1458 #else
1459 #define TEST_UI_IMM(src, srcw) \
1460 	((src) & SLJIT_IMM)
1461 #endif
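/* Illustrative summary (assumption based on the forms used below): these
   predicates test whether an immediate fits a single low half-word op
   (SL/UL), a single high half-word op (SH/UH), or the combined two-part
   forms (ADD/UI), letting sljit_emit_op2 avoid a full load_immediate(). */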
1462 
1463 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1464 #define TEST_ADD_FORM1(op) \
1465 	(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW \
1466 		|| (op & (SLJIT_32 | SLJIT_SET_Z | VARIABLE_FLAG_MASK)) == (SLJIT_32 | SLJIT_SET_Z | SLJIT_SET_CARRY))
1467 #define TEST_SUB_FORM2(op) \
1468 	((GET_FLAG_TYPE(op) >= SLJIT_SIG_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) \
1469 		|| (op & (SLJIT_32 | SLJIT_SET_Z | VARIABLE_FLAG_MASK)) == (SLJIT_32 | SLJIT_SET_Z))
1470 #define TEST_SUB_FORM3(op) \
1471 	(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW \
1472 		|| (op & (SLJIT_32 | SLJIT_SET_Z)) == (SLJIT_32 | SLJIT_SET_Z))
1473 #else
1474 #define TEST_ADD_FORM1(op) \
1475 	(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
1476 #define TEST_SUB_FORM2(op) \
1477 	(GET_FLAG_TYPE(op) >= SLJIT_SIG_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL)
1478 #define TEST_SUB_FORM3(op) \
1479 	(GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
1480 #endif
1481 
1482 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
1483 	sljit_s32 dst, sljit_sw dstw,
1484 	sljit_s32 src1, sljit_sw src1w,
1485 	sljit_s32 src2, sljit_sw src2w)
1486 {
1487 	sljit_s32 flags = HAS_FLAGS(op) ? ALT_SET_FLAGS : 0;
1488 
1489 	CHECK_ERROR();
1490 	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
1491 	ADJUST_LOCAL_OFFSET(dst, dstw);
1492 	ADJUST_LOCAL_OFFSET(src1, src1w);
1493 	ADJUST_LOCAL_OFFSET(src2, src2w);
1494 
1495 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1496 	if (op & SLJIT_32) {
1497 		/* Most operations expect sign extended arguments. */
1498 		flags |= INT_DATA | SIGNED_DATA;
1499 		if (src1 & SLJIT_IMM)
1500 			src1w = (sljit_s32)(src1w);
1501 		if (src2 & SLJIT_IMM)
1502 			src2w = (sljit_s32)(src2w);
1503 		if (HAS_FLAGS(op))
1504 			flags |= ALT_SIGN_EXT;
1505 	}
1506 #endif
1507 	if (GET_FLAG_TYPE(op) == SLJIT_OVERFLOW)
1508 		FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
1509 
1510 	switch (GET_OPCODE(op)) {
1511 	case SLJIT_ADD:
1512 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
1513 
1514 		if (TEST_ADD_FORM1(op))
1515 			return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
1516 
1517 		if (!HAS_FLAGS(op) && ((src1 | src2) & SLJIT_IMM)) {
1518 			if (TEST_SL_IMM(src2, src2w)) {
1519 				compiler->imm = (sljit_ins)src2w & 0xffff;
1520 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
1521 			}
1522 			if (TEST_SL_IMM(src1, src1w)) {
1523 				compiler->imm = (sljit_ins)src1w & 0xffff;
1524 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
1525 			}
1526 			if (TEST_SH_IMM(src2, src2w)) {
1527 				compiler->imm = (sljit_ins)(src2w >> 16) & 0xffff;
1528 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1529 			}
1530 			if (TEST_SH_IMM(src1, src1w)) {
1531 				compiler->imm = (sljit_ins)(src1w >> 16) & 0xffff;
1532 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
1533 			}
1534 			/* The range between -1 and -32768 is covered above. */
1535 			if (TEST_ADD_IMM(src2, src2w)) {
1536 				compiler->imm = (sljit_ins)src2w & 0xffffffff;
1537 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
1538 			}
1539 			if (TEST_ADD_IMM(src1, src1w)) {
1540 				compiler->imm = (sljit_ins)src1w & 0xffffffff;
1541 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0);
1542 			}
1543 		}
1544 
1545 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1546 		if ((op & (SLJIT_32 | SLJIT_SET_Z)) == (SLJIT_32 | SLJIT_SET_Z)) {
1547 			if (TEST_SL_IMM(src2, src2w)) {
1548 				compiler->imm = (sljit_ins)src2w & 0xffff;
1549 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4 | ALT_FORM5, dst, dstw, src1, src1w, TMP_REG2, 0);
1550 			}
1551 			if (TEST_SL_IMM(src1, src1w)) {
1552 				compiler->imm = (sljit_ins)src1w & 0xffff;
1553 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4 | ALT_FORM5, dst, dstw, src2, src2w, TMP_REG2, 0);
1554 			}
1555 			return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM4, dst, dstw, src1, src1w, src2, src2w);
1556 		}
1557 #endif
1558 		if (HAS_FLAGS(op)) {
1559 			if (TEST_SL_IMM(src2, src2w)) {
1560 				compiler->imm = (sljit_ins)src2w & 0xffff;
1561 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1562 			}
1563 			if (TEST_SL_IMM(src1, src1w)) {
1564 				compiler->imm = (sljit_ins)src1w & 0xffff;
1565 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
1566 			}
1567 		}
1568 		return emit_op(compiler, SLJIT_ADD, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
1569 
1570 	case SLJIT_ADDC:
1571 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
1572 		return emit_op(compiler, SLJIT_ADDC, flags, dst, dstw, src1, src1w, src2, src2w);
1573 
1574 	case SLJIT_SUB:
1575 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
1576 
1577 		if (GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_LESS_EQUAL) {
1578 			if (dst == TMP_REG2) {
1579 				if (TEST_UL_IMM(src2, src2w)) {
1580 					compiler->imm = (sljit_ins)src2w & 0xffff;
1581 					return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
1582 				}
1583 				return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1, dst, dstw, src1, src1w, src2, src2w);
1584 			}
1585 
1586 			if ((src2 & SLJIT_IMM) && src2w >= 0 && src2w <= (SIMM_MAX + 1)) {
1587 				compiler->imm = (sljit_ins)src2w;
1588 				return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1589 			}
1590 			return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM1 | ALT_FORM3, dst, dstw, src1, src1w, src2, src2w);
1591 		}
1592 
1593 		if (dst == TMP_REG2 && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
1594 			if (TEST_SL_IMM(src2, src2w)) {
1595 				compiler->imm = (sljit_ins)src2w & 0xffff;
1596 				return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1597 			}
1598 			return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2, dst, dstw, src1, src1w, src2, src2w);
1599 		}
1600 
1601 		if (TEST_SUB_FORM2(op)) {
1602 			if ((src2 & SLJIT_IMM) && src2w >= -SIMM_MAX && src2w <= SIMM_MAX) {
1603 				compiler->imm = (sljit_ins)src2w & 0xffff;
1604 				return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM3 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
1605 			}
1606 			return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, src2, src2w);
1607 		}
1608 
1609 		if (TEST_SUB_FORM3(op))
1610 			return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM3, dst, dstw, src1, src1w, src2, src2w);
1611 
1612 		if (TEST_SL_IMM(src2, -src2w)) {
1613 			compiler->imm = (sljit_ins)(-src2w) & 0xffff;
1614 			return emit_op(compiler, SLJIT_ADD, flags | (!HAS_FLAGS(op) ? ALT_FORM2 : ALT_FORM3), dst, dstw, src1, src1w, TMP_REG2, 0);
1615 		}
1616 
1617 		if (TEST_SL_IMM(src1, src1w) && !(op & SLJIT_SET_Z)) {
1618 			compiler->imm = (sljit_ins)src1w & 0xffff;
1619 			return emit_op(compiler, SLJIT_SUB, flags | ALT_FORM4, dst, dstw, src2, src2w, TMP_REG2, 0);
1620 		}
1621 
1622 		if (!HAS_FLAGS(op)) {
1623 			if (TEST_SH_IMM(src2, -src2w)) {
1624 				compiler->imm = (sljit_ins)((-src2w) >> 16) & 0xffff;
1625 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1626 			}
1627 			/* The range between -1 and -32768 is covered above. */
1628 			if (TEST_ADD_IMM(src2, -src2w)) {
1629 				compiler->imm = (sljit_ins)-src2w;
1630 				return emit_op(compiler, SLJIT_ADD, flags | ALT_FORM2 | ALT_FORM4, dst, dstw, src1, src1w, TMP_REG2, 0);
1631 			}
1632 		}
1633 
1634 		/* We know ALT_SIGN_EXT is set if this is an SLJIT_32 operation on 64 bit systems. */
1635 		return emit_op(compiler, SLJIT_SUB, flags | ((GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY)) ? ALT_FORM5 : 0), dst, dstw, src1, src1w, src2, src2w);
1636 
1637 	case SLJIT_SUBC:
1638 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
1639 		return emit_op(compiler, SLJIT_SUBC, flags, dst, dstw, src1, src1w, src2, src2w);
1640 
1641 	case SLJIT_MUL:
1642 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1643 		if (op & SLJIT_32)
1644 			flags |= ALT_FORM2;
1645 #endif
1646 		if (!HAS_FLAGS(op)) {
1647 			if (TEST_SL_IMM(src2, src2w)) {
1648 				compiler->imm = (sljit_ins)src2w & 0xffff;
1649 				return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
1650 			}
1651 			if (TEST_SL_IMM(src1, src1w)) {
1652 				compiler->imm = (sljit_ins)src1w & 0xffff;
1653 				return emit_op(compiler, SLJIT_MUL, flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0);
1654 			}
1655 		}
1656 		else
1657 			FAIL_IF(push_inst(compiler, MTXER | S(TMP_ZERO)));
1658 		return emit_op(compiler, SLJIT_MUL, flags, dst, dstw, src1, src1w, src2, src2w);
1659 
1660 	case SLJIT_AND:
1661 	case SLJIT_OR:
1662 	case SLJIT_XOR:
1663 		/* Commutative unsigned operations. */
1664 		if (!HAS_FLAGS(op) || GET_OPCODE(op) == SLJIT_AND) {
1665 			if (TEST_UL_IMM(src2, src2w)) {
1666 				compiler->imm = (sljit_ins)src2w;
1667 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
1668 			}
1669 			if (TEST_UL_IMM(src1, src1w)) {
1670 				compiler->imm = (sljit_ins)src1w;
1671 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src2, src2w, TMP_REG2, 0);
1672 			}
1673 			if (TEST_UH_IMM(src2, src2w)) {
1674 				compiler->imm = (sljit_ins)(src2w >> 16) & 0xffff;
1675 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM2, dst, dstw, src1, src1w, TMP_REG2, 0);
1676 			}
1677 			if (TEST_UH_IMM(src1, src1w)) {
1678 				compiler->imm = (sljit_ins)(src1w >> 16) & 0xffff;
1679 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM2, dst, dstw, src2, src2w, TMP_REG2, 0);
1680 			}
1681 		}
1682 		if (!HAS_FLAGS(op) && GET_OPCODE(op) != SLJIT_AND) {
1683 			/* Unlike or and xor, each immediate form of and also clears the bits outside its
1683 			   16 bit immediate, so a 32 bit value cannot be composed from two instructions here. */
1684 			if (TEST_UI_IMM(src2, src2w)) {
1685 				compiler->imm = (sljit_ins)src2w;
1686 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM3, dst, dstw, src1, src1w, TMP_REG2, 0);
1687 			}
1688 			if (TEST_UI_IMM(src1, src1w)) {
1689 				compiler->imm = (sljit_ins)src1w;
1690 				return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM3, dst, dstw, src2, src2w, TMP_REG2, 0);
1691 			}
1692 		}
1693 		return emit_op(compiler, GET_OPCODE(op), flags, dst, dstw, src1, src1w, src2, src2w);
1694 
1695 	case SLJIT_SHL:
1696 	case SLJIT_MSHL:
1697 	case SLJIT_LSHR:
1698 	case SLJIT_MLSHR:
1699 	case SLJIT_ASHR:
1700 	case SLJIT_MASHR:
1701 	case SLJIT_ROTL:
1702 	case SLJIT_ROTR:
1703 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1704 		if (op & SLJIT_32)
1705 			flags |= ALT_FORM2;
1706 #endif
1707 		if (src2 & SLJIT_IMM) {
1708 			compiler->imm = (sljit_ins)src2w;
1709 			return emit_op(compiler, GET_OPCODE(op), flags | ALT_FORM1, dst, dstw, src1, src1w, TMP_REG2, 0);
1710 		}
1711 		return emit_op(compiler, GET_OPCODE(op), flags, dst, dstw, src1, src1w, src2, src2w);
1712 	}
1713 
1714 	return SLJIT_SUCCESS;
1715 }
1716 
1717 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
1718 	sljit_s32 src1, sljit_sw src1w,
1719 	sljit_s32 src2, sljit_sw src2w)
1720 {
1721 	CHECK_ERROR();
1722 	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
1723 
1724 	SLJIT_SKIP_CHECKS(compiler);
1725 	return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
1726 }
1727 
1728 #undef TEST_ADD_FORM1
1729 #undef TEST_SUB_FORM2
1730 #undef TEST_SUB_FORM3
1731 
1732 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
1733 	sljit_s32 src_dst,
1734 	sljit_s32 src1, sljit_sw src1w,
1735 	sljit_s32 src2, sljit_sw src2w)
1736 {
1737 	sljit_s32 is_right;
1738 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1739 	sljit_s32 inp_flags = ((op & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
1740 	sljit_sw bit_length = (op & SLJIT_32) ? 32 : 64;
1741 #else /* !SLJIT_CONFIG_PPC_64 */
1742 	sljit_s32 inp_flags = WORD_DATA | LOAD_DATA;
1743 	sljit_sw bit_length = 32;
1744 #endif /* SLJIT_CONFIG_PPC_64 */
1745 
1746 	CHECK_ERROR();
1747 	CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
1748 
1749 	is_right = (GET_OPCODE(op) == SLJIT_LSHR || GET_OPCODE(op) == SLJIT_MLSHR);
1750 
1751 	if (src_dst == src1) {
1752 		SLJIT_SKIP_CHECKS(compiler);
1753 		return sljit_emit_op2(compiler, (is_right ? SLJIT_ROTR : SLJIT_ROTL) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
1754 	}
1755 
1756 	ADJUST_LOCAL_OFFSET(src1, src1w);
1757 	ADJUST_LOCAL_OFFSET(src2, src2w);
1758 
1759 	if (src2 & SLJIT_IMM) {
1760 		src2w &= bit_length - 1;
1761 
1762 		if (src2w == 0)
1763 			return SLJIT_SUCCESS;
1764 	} else if (src2 & SLJIT_MEM) {
1765 		FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src2, src2w, TMP_REG2));
1766 		src2 = TMP_REG2;
1767 	}
1768 
1769 	if (src1 & SLJIT_MEM) {
1770 		FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src1, src1w, TMP_REG1));
1771 		src1 = TMP_REG1;
1772 	} else if (src1 & SLJIT_IMM) {
1773 		FAIL_IF(load_immediate(compiler, TMP_REG1, src1w));
1774 		src1 = TMP_REG1;
1775 	}
1776 
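	/* Constant shift amount: shift src_dst into place, then merge the complementary bits
	   of src1 with a rotate-and-insert (rlwimi/rldimi) or an explicit shift and or. */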
1777 	if (src2 & SLJIT_IMM) {
1778 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1779 		if (!(op & SLJIT_32)) {
1780 			if (is_right) {
1781 				FAIL_IF(push_inst(compiler, SRDI(src2w) | S(src_dst) | A(src_dst)));
1782 				return push_inst(compiler, RLDIMI | S(src1) | A(src_dst) | RLDI_SH(64 - src2w) | RLDI_MB(0));
1783 			}
1784 
1785 			FAIL_IF(push_inst(compiler, SLDI(src2w) | S(src_dst) | A(src_dst)));
1786 			/* Computes SRDI(64 - src2w). */
1787 			FAIL_IF(push_inst(compiler, RLDICL | S(src1) | A(TMP_REG1) | RLDI_SH(src2w) | RLDI_MB(64 - src2w)));
1788 			return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
1789 		}
1790 #endif /* SLJIT_CONFIG_PPC_64 */
1791 
1792 		if (is_right) {
1793 			FAIL_IF(push_inst(compiler, SRWI(src2w) | S(src_dst) | A(src_dst)));
1794 			return push_inst(compiler, RLWIMI | S(src1) | A(src_dst) | RLWI_SH(32 - src2w) | RLWI_MBE(0, src2w - 1));
1795 		}
1796 
1797 		FAIL_IF(push_inst(compiler, SLWI(src2w) | S(src_dst) | A(src_dst)));
1798 		return push_inst(compiler, RLWIMI | S(src1) | A(src_dst) | RLWI_SH(src2w) | RLWI_MBE(32 - src2w, 31));
1799 	}
1800 
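	/* Register shift amount: src_dst is shifted by n while src1 is pre-shifted by one in
	   the opposite direction and then by (n ^ (bit_length - 1)), which adds up to
	   bit_length - n, so the two halves can simply be or-ed together. */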
1801 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1802 	if (!(op & SLJIT_32)) {
1803 		if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR) {
1804 			FAIL_IF(push_inst(compiler, ANDI | S(src2) | A(TMP_REG2) | 0x3f));
1805 			src2 = TMP_REG2;
1806 		}
1807 
1808 		FAIL_IF(push_inst(compiler, (is_right ? SRD : SLD) | S(src_dst) | A(src_dst) | B(src2)));
1809 		FAIL_IF(push_inst(compiler, (is_right ? SLDI(1) : SRDI(1)) | S(src1) | A(TMP_REG1)));
1810 		FAIL_IF(push_inst(compiler, XORI | S(src2) | A(TMP_REG2) | 0x3f));
1811 		FAIL_IF(push_inst(compiler, (is_right ? SLD : SRD) | S(TMP_REG1) | A(TMP_REG1) | B(TMP_REG2)));
1812 		return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
1813 	}
1814 #endif /* SLJIT_CONFIG_PPC_64 */
1815 
1816 	if (GET_OPCODE(op) == SLJIT_MSHL || GET_OPCODE(op) == SLJIT_MLSHR) {
1817 		FAIL_IF(push_inst(compiler, ANDI | S(src2) | A(TMP_REG2) | 0x1f));
1818 		src2 = TMP_REG2;
1819 	}
1820 
1821 	FAIL_IF(push_inst(compiler, (is_right ? SRW : SLW) | S(src_dst) | A(src_dst) | B(src2)));
1822 	FAIL_IF(push_inst(compiler, (is_right ? SLWI(1) : SRWI(1)) | S(src1) | A(TMP_REG1)));
1823 	FAIL_IF(push_inst(compiler, XORI | S(src2) | A(TMP_REG2) | 0x1f));
1824 	FAIL_IF(push_inst(compiler, (is_right ? SLW : SRW) | S(TMP_REG1) | A(TMP_REG1) | B(TMP_REG2)));
1825 	return push_inst(compiler, OR | S(src_dst) | A(src_dst) | B(TMP_REG1));
1826 }
1827 
1828 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
1829 	sljit_s32 src, sljit_sw srcw)
1830 {
1831 	CHECK_ERROR();
1832 	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
1833 	ADJUST_LOCAL_OFFSET(src, srcw);
1834 
1835 	switch (op) {
1836 	case SLJIT_FAST_RETURN:
1837 		if (FAST_IS_REG(src))
1838 			FAIL_IF(push_inst(compiler, MTLR | S(src)));
1839 		else {
1840 			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG2, src, srcw, TMP_REG2));
1841 			FAIL_IF(push_inst(compiler, MTLR | S(TMP_REG2)));
1842 		}
1843 
1844 		return push_inst(compiler, BLR);
1845 	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
1846 		return SLJIT_SUCCESS;
1847 	case SLJIT_PREFETCH_L1:
1848 	case SLJIT_PREFETCH_L2:
1849 	case SLJIT_PREFETCH_L3:
1850 	case SLJIT_PREFETCH_ONCE:
1851 		return emit_prefetch(compiler, src, srcw);
1852 	}
1853 
1854 	return SLJIT_SUCCESS;
1855 }
1856 
1857 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
1858 {
1859 	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
1860 	return reg_map[reg];
1861 }
1862 
1863 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
1864 {
1865 	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
1866 	return freg_map[reg];
1867 }
1868 
1869 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
1870 	void *instruction, sljit_u32 size)
1871 {
1872 	CHECK_ERROR();
1873 	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
1874 
1875 	return push_inst(compiler, *(sljit_ins*)instruction);
1876 }
1877 
1878 /* --------------------------------------------------------------------- */
1879 /*  Floating point operators                                             */
1880 /* --------------------------------------------------------------------- */
1881 
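/* FLOAT_DATA turns the SLJIT_32 bit of the operation into the single precision bit of the
   data transfer flags; the compile time assert in sljit_emit_fop1 checks this mapping. */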
1882 #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 6))
1883 #define SELECT_FOP(op, single, double) ((sljit_ins)((op & SLJIT_32) ? single : double))
1884 
1885 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1886 #define FLOAT_TMP_MEM_OFFSET (6 * sizeof(sljit_sw))
1887 #else
1888 #define FLOAT_TMP_MEM_OFFSET (2 * sizeof(sljit_sw))
1889 
1890 #if (defined SLJIT_LITTLE_ENDIAN && SLJIT_LITTLE_ENDIAN)
1891 #define FLOAT_TMP_MEM_OFFSET_LOW (2 * sizeof(sljit_sw))
1892 #define FLOAT_TMP_MEM_OFFSET_HI (3 * sizeof(sljit_sw))
1893 #else
1894 #define FLOAT_TMP_MEM_OFFSET_LOW (3 * sizeof(sljit_sw))
1895 #define FLOAT_TMP_MEM_OFFSET_HI (2 * sizeof(sljit_sw))
1896 #endif
1897 
1898 #endif /* SLJIT_CONFIG_PPC_64 */
1899 
1900 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
1901 	sljit_s32 dst, sljit_sw dstw,
1902 	sljit_s32 src, sljit_sw srcw)
1903 {
1904 	if (src & SLJIT_MEM) {
1905 		/* We can ignore the temporary data store on the stack from a caching point of view. */
1906 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, TMP_REG1));
1907 		src = TMP_FREG1;
1908 	}
1909 
1910 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1911 	op = GET_OPCODE(op);
1912 	FAIL_IF(push_inst(compiler, (op == SLJIT_CONV_S32_FROM_F64 ? FCTIWZ : FCTIDZ) | FD(TMP_FREG1) | FB(src)));
1913 
1914 	if (op == SLJIT_CONV_SW_FROM_F64) {
1915 		if (FAST_IS_REG(dst)) {
1916 			FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
1917 			return emit_op_mem(compiler, WORD_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1);
1918 		}
1919 		return emit_op_mem(compiler, DOUBLE_DATA, TMP_FREG1, dst, dstw, TMP_REG1);
1920 	}
1921 #else
1922 	FAIL_IF(push_inst(compiler, FCTIWZ | FD(TMP_FREG1) | FB(src)));
1923 #endif
1924 
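	/* The truncated integer is still held in a floating point register, so it is stored
	   with stfiwx and, when the destination is a general register, reloaded from the stack. */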
1925 	if (FAST_IS_REG(dst)) {
1926 		FAIL_IF(load_immediate(compiler, TMP_REG1, FLOAT_TMP_MEM_OFFSET));
1927 		FAIL_IF(push_inst(compiler, STFIWX | FS(TMP_FREG1) | A(SLJIT_SP) | B(TMP_REG1)));
1928 		return emit_op_mem(compiler, INT_DATA | LOAD_DATA, dst, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1);
1929 	}
1930 
1931 	SLJIT_ASSERT(dst & SLJIT_MEM);
1932 
1933 	if (dst & OFFS_REG_MASK) {
1934 		dstw &= 0x3;
1935 		if (dstw) {
1936 			FAIL_IF(push_inst(compiler, SLWI_W(dstw) | S(OFFS_REG(dst)) | A(TMP_REG1)));
1937 			dstw = TMP_REG1;
1938 		}
1939 		else
1940 			dstw = OFFS_REG(dst);
1941 	}
1942 	else {
1943 		if ((dst & REG_MASK) && !dstw) {
1944 			dstw = dst & REG_MASK;
1945 			dst = 0;
1946 		}
1947 		else {
1948 			/* This works regardless of whether we have SLJIT_MEM1 or SLJIT_MEM0. */
1949 			FAIL_IF(load_immediate(compiler, TMP_REG1, dstw));
1950 			dstw = TMP_REG1;
1951 		}
1952 	}
1953 
1954 	return push_inst(compiler, STFIWX | FS(TMP_FREG1) | A(dst & REG_MASK) | B(dstw));
1955 }
1956 
1957 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
1958 	sljit_s32 dst, sljit_sw dstw,
1959 	sljit_s32 src, sljit_sw srcw)
1960 {
1961 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
1962 
1963 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1964 
1965 	if (src & SLJIT_IMM) {
1966 		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
1967 			srcw = (sljit_s32)srcw;
1968 
1969 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw));
1970 		src = TMP_REG1;
1971 	}
1972 	else if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32) {
1973 		if (FAST_IS_REG(src))
1974 			FAIL_IF(push_inst(compiler, EXTSW | S(src) | A(TMP_REG1)));
1975 		else
1976 			FAIL_IF(emit_op_mem(compiler, INT_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
1977 		src = TMP_REG1;
1978 	}
1979 
1980 	if (FAST_IS_REG(src)) {
1981 		FAIL_IF(emit_op_mem(compiler, WORD_DATA, src, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
1982 		FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
1983 	}
1984 	else
1985 		FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, src, srcw, TMP_REG1));
1986 
1987 	FAIL_IF(push_inst(compiler, FCFID | FD(dst_r) | FB(TMP_FREG1)));
1988 
1989 	if (dst & SLJIT_MEM)
1990 		return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
1991 	if (op & SLJIT_32)
1992 		return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
1993 	return SLJIT_SUCCESS;
1994 
1995 #else
1996 
1997 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
1998 	sljit_s32 invert_sign = 1;
1999 
2000 	if (src & SLJIT_IMM) {
2001 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw ^ (sljit_sw)0x80000000));
2002 		src = TMP_REG1;
2003 		invert_sign = 0;
2004 	}
2005 	else if (!FAST_IS_REG(src)) {
2006 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | SIGNED_DATA | LOAD_DATA, TMP_REG1, src, srcw, TMP_REG1));
2007 		src = TMP_REG1;
2008 	}
2009 
2010 	/* First, a special double precision floating point value is constructed: (2^53 + (input xor 2^31)).
2011 	   The double precision format has exactly 53 bits of precision, so the lower 32 bits represent
2012 	   the lower 32 bits of such a value. The xor with 2^31 is the same as adding 0x80000000
2013 	   to the input, which shifts it into the 0 - 0xffffffff range. To get the converted floating
2014 	   point value, we need to subtract 2^53 + 2^31 from the constructed value. */
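	/* For example, an input of -1 (0xffffffff) becomes 0x7fffffff after the xor, the
	   constructed value is 2^53 + 0x7fffffff, and subtracting 2^53 + 2^31 yields -1.0. */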
2015 	FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG2) | A(0) | 0x4330));
2016 	if (invert_sign)
2017 		FAIL_IF(push_inst(compiler, XORIS | S(src) | A(TMP_REG1) | 0x8000));
2018 	FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_HI, TMP_REG1));
2019 	FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_LOW, TMP_REG2));
2020 	FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG1) | A(0) | 0x8000));
2021 	FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
2022 	FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG1, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET_LOW, TMP_REG2));
2023 	FAIL_IF(emit_op_mem(compiler, DOUBLE_DATA | LOAD_DATA, TMP_FREG2, SLJIT_MEM1(SLJIT_SP), FLOAT_TMP_MEM_OFFSET, TMP_REG1));
2024 
2025 	FAIL_IF(push_inst(compiler, FSUB | FD(dst_r) | FA(TMP_FREG1) | FB(TMP_FREG2)));
2026 
2027 	if (dst & SLJIT_MEM)
2028 		return emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, TMP_REG1);
2029 	if (op & SLJIT_32)
2030 		return push_inst(compiler, FRSP | FD(dst_r) | FB(dst_r));
2031 	return SLJIT_SUCCESS;
2032 
2033 #endif
2034 }
2035 
2036 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
2037 	sljit_s32 src1, sljit_sw src1w,
2038 	sljit_s32 src2, sljit_sw src2w)
2039 {
2040 	if (src1 & SLJIT_MEM) {
2041 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, TMP_REG1));
2042 		src1 = TMP_FREG1;
2043 	}
2044 
2045 	if (src2 & SLJIT_MEM) {
2046 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, TMP_REG2));
2047 		src2 = TMP_FREG2;
2048 	}
2049 
2050 	FAIL_IF(push_inst(compiler, FCMPU | CRD(4) | FA(src1) | FB(src2)));
2051 
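	/* For the mixed ordered/unordered conditions, or the unordered bit of the compare
	   result into the tested bit, so a single condition register bit can be checked later. */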
2052 	switch (GET_FLAG_TYPE(op)) {
2053 	case SLJIT_UNORDERED_OR_EQUAL:
2054 	case SLJIT_ORDERED_NOT_EQUAL:
2055 		return push_inst(compiler, CROR | ((4 + 2) << 21) | ((4 + 2) << 16) | ((4 + 3) << 11));
2056 	case SLJIT_UNORDERED_OR_LESS:
2057 	case SLJIT_ORDERED_GREATER_EQUAL:
2058 		return push_inst(compiler, CROR | ((4 + 0) << 21) | ((4 + 0) << 16) | ((4 + 3) << 11));
2059 	case SLJIT_UNORDERED_OR_GREATER:
2060 	case SLJIT_ORDERED_LESS_EQUAL:
2061 		return push_inst(compiler, CROR | ((4 + 1) << 21) | ((4 + 1) << 16) | ((4 + 3) << 11));
2062 	}
2063 
2064 	return SLJIT_SUCCESS;
2065 }
2066 
2067 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
2068 	sljit_s32 dst, sljit_sw dstw,
2069 	sljit_s32 src, sljit_sw srcw)
2070 {
2071 	sljit_s32 dst_r;
2072 
2073 	CHECK_ERROR();
2074 
2075 	SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x4), float_transfer_bit_error);
2076 	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2077 
2078 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
2079 		op ^= SLJIT_32;
2080 
2081 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2082 
2083 	if (src & SLJIT_MEM) {
2084 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, TMP_REG1));
2085 		src = dst_r;
2086 	}
2087 
2088 	switch (GET_OPCODE(op)) {
2089 	case SLJIT_CONV_F64_FROM_F32:
2090 		op ^= SLJIT_32;
2091 		if (op & SLJIT_32) {
2092 			FAIL_IF(push_inst(compiler, FRSP | FD(dst_r) | FB(src)));
2093 			break;
2094 		}
2095 		/* Fall through. */
2096 	case SLJIT_MOV_F64:
2097 		if (src != dst_r) {
2098 			if (dst_r != TMP_FREG1)
2099 				FAIL_IF(push_inst(compiler, FMR | FD(dst_r) | FB(src)));
2100 			else
2101 				dst_r = src;
2102 		}
2103 		break;
2104 	case SLJIT_NEG_F64:
2105 		FAIL_IF(push_inst(compiler, FNEG | FD(dst_r) | FB(src)));
2106 		break;
2107 	case SLJIT_ABS_F64:
2108 		FAIL_IF(push_inst(compiler, FABS | FD(dst_r) | FB(src)));
2109 		break;
2110 	}
2111 
2112 	if (dst & SLJIT_MEM)
2113 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op), dst_r, dst, dstw, TMP_REG1));
2114 	return SLJIT_SUCCESS;
2115 }
2116 
2117 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
2118 	sljit_s32 dst, sljit_sw dstw,
2119 	sljit_s32 src1, sljit_sw src1w,
2120 	sljit_s32 src2, sljit_sw src2w)
2121 {
2122 	sljit_s32 dst_r;
2123 
2124 	CHECK_ERROR();
2125 	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2126 	ADJUST_LOCAL_OFFSET(dst, dstw);
2127 	ADJUST_LOCAL_OFFSET(src1, src1w);
2128 	ADJUST_LOCAL_OFFSET(src2, src2w);
2129 
2130 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;
2131 
2132 	if (src1 & SLJIT_MEM) {
2133 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, TMP_REG1));
2134 		src1 = TMP_FREG1;
2135 	}
2136 
2137 	if (src2 & SLJIT_MEM) {
2138 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, TMP_REG2));
2139 		src2 = TMP_FREG2;
2140 	}
2141 
2142 	switch (GET_OPCODE(op)) {
2143 	case SLJIT_ADD_F64:
2144 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FADDS, FADD) | FD(dst_r) | FA(src1) | FB(src2)));
2145 		break;
2146 
2147 	case SLJIT_SUB_F64:
2148 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FSUBS, FSUB) | FD(dst_r) | FA(src1) | FB(src2)));
2149 		break;
2150 
2151 	case SLJIT_MUL_F64:
2152 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FMULS, FMUL) | FD(dst_r) | FA(src1) | FC(src2) /* FMUL uses FC as src2. */));
2153 		break;
2154 
2155 	case SLJIT_DIV_F64:
2156 		FAIL_IF(push_inst(compiler, SELECT_FOP(op, FDIVS, FDIV) | FD(dst_r) | FA(src1) | FB(src2)));
2157 		break;
2158 	}
2159 
2160 	if (dst & SLJIT_MEM)
2161 		FAIL_IF(emit_op_mem(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, TMP_REG1));
2162 
2163 	return SLJIT_SUCCESS;
2164 }
2165 
2166 #undef SELECT_FOP
2167 
2168 /* --------------------------------------------------------------------- */
2169 /*  Other instructions                                                   */
2170 /* --------------------------------------------------------------------- */
2171 
2172 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2173 {
2174 	CHECK_ERROR();
2175 	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
2176 	ADJUST_LOCAL_OFFSET(dst, dstw);
2177 
2178 	if (FAST_IS_REG(dst))
2179 		return push_inst(compiler, MFLR | D(dst));
2180 
2181 	/* Memory. */
2182 	FAIL_IF(push_inst(compiler, MFLR | D(TMP_REG2)));
2183 	return emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0);
2184 }
2185 
2186 /* --------------------------------------------------------------------- */
2187 /*  Conditional instructions                                             */
2188 /* --------------------------------------------------------------------- */
2189 
2190 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
2191 {
2192 	struct sljit_label *label;
2193 
2194 	CHECK_ERROR_PTR();
2195 	CHECK_PTR(check_sljit_emit_label(compiler));
2196 
2197 	if (compiler->last_label && compiler->last_label->size == compiler->size)
2198 		return compiler->last_label;
2199 
2200 	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
2201 	PTR_FAIL_IF(!label);
2202 	set_label(label, compiler);
2203 	return label;
2204 }
2205 
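/* Returns the BO and BI fields of a conditional branch: the BO value (shifted to bit 21)
   is 12 to branch when the condition bit is set, 4 to branch when it is clear and 20 to
   branch always, while the BI value (shifted to bit 16) selects the tested CR bit. */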
2206 static sljit_ins get_bo_bi_flags(struct sljit_compiler *compiler, sljit_s32 type)
2207 {
2208 	switch (type) {
2209 	case SLJIT_NOT_CARRY:
2210 		if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_SUB)
2211 			return (4 << 21) | (2 << 16);
2212 		/* fallthrough */
2213 
2214 	case SLJIT_EQUAL:
2215 		return (12 << 21) | (2 << 16);
2216 
2217 	case SLJIT_CARRY:
2218 		if (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_SUB)
2219 			return (12 << 21) | (2 << 16);
2220 		/* fallthrough */
2221 
2222 	case SLJIT_NOT_EQUAL:
2223 		return (4 << 21) | (2 << 16);
2224 
2225 	case SLJIT_LESS:
2226 	case SLJIT_SIG_LESS:
2227 		return (12 << 21) | (0 << 16);
2228 
2229 	case SLJIT_GREATER_EQUAL:
2230 	case SLJIT_SIG_GREATER_EQUAL:
2231 		return (4 << 21) | (0 << 16);
2232 
2233 	case SLJIT_GREATER:
2234 	case SLJIT_SIG_GREATER:
2235 		return (12 << 21) | (1 << 16);
2236 
2237 	case SLJIT_LESS_EQUAL:
2238 	case SLJIT_SIG_LESS_EQUAL:
2239 		return (4 << 21) | (1 << 16);
2240 
2241 	case SLJIT_OVERFLOW:
2242 		return (12 << 21) | (3 << 16);
2243 
2244 	case SLJIT_NOT_OVERFLOW:
2245 		return (4 << 21) | (3 << 16);
2246 
2247 	case SLJIT_F_LESS:
2248 	case SLJIT_ORDERED_LESS:
2249 	case SLJIT_UNORDERED_OR_LESS:
2250 		return (12 << 21) | ((4 + 0) << 16);
2251 
2252 	case SLJIT_F_GREATER_EQUAL:
2253 	case SLJIT_ORDERED_GREATER_EQUAL:
2254 	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2255 		return (4 << 21) | ((4 + 0) << 16);
2256 
2257 	case SLJIT_F_GREATER:
2258 	case SLJIT_ORDERED_GREATER:
2259 	case SLJIT_UNORDERED_OR_GREATER:
2260 		return (12 << 21) | ((4 + 1) << 16);
2261 
2262 	case SLJIT_F_LESS_EQUAL:
2263 	case SLJIT_ORDERED_LESS_EQUAL:
2264 	case SLJIT_UNORDERED_OR_LESS_EQUAL:
2265 		return (4 << 21) | ((4 + 1) << 16);
2266 
2267 	case SLJIT_F_EQUAL:
2268 	case SLJIT_ORDERED_EQUAL:
2269 	case SLJIT_UNORDERED_OR_EQUAL:
2270 		return (12 << 21) | ((4 + 2) << 16);
2271 
2272 	case SLJIT_F_NOT_EQUAL:
2273 	case SLJIT_ORDERED_NOT_EQUAL:
2274 	case SLJIT_UNORDERED_OR_NOT_EQUAL:
2275 		return (4 << 21) | ((4 + 2) << 16);
2276 
2277 	case SLJIT_UNORDERED:
2278 		return (12 << 21) | ((4 + 3) << 16);
2279 
2280 	case SLJIT_ORDERED:
2281 		return (4 << 21) | ((4 + 3) << 16);
2282 
2283 	default:
2284 		SLJIT_ASSERT(type >= SLJIT_JUMP && type <= SLJIT_CALL_REG_ARG);
2285 		return (20 << 21);
2286 	}
2287 }
2288 
2289 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
2290 {
2291 	struct sljit_jump *jump;
2292 	sljit_ins bo_bi_flags;
2293 
2294 	CHECK_ERROR_PTR();
2295 	CHECK_PTR(check_sljit_emit_jump(compiler, type));
2296 
2297 	bo_bi_flags = get_bo_bi_flags(compiler, type & 0xff);
2298 	if (!bo_bi_flags)
2299 		return NULL;
2300 
2301 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2302 	PTR_FAIL_IF(!jump);
2303 	set_jump(jump, compiler, (sljit_u32)type & SLJIT_REWRITABLE_JUMP);
2304 	type &= 0xff;
2305 
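	/* The carry flag lives in XER, not in the condition register; adde. with two zero
	   operands copies it into a register and sets CR0, so it can be branched on as EQ/NE. */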
2306 	if (type == SLJIT_CARRY || type == SLJIT_NOT_CARRY)
2307 		PTR_FAIL_IF(push_inst(compiler, ADDE | RC(ALT_SET_FLAGS) | D(TMP_REG1) | A(TMP_ZERO) | B(TMP_ZERO)));
2308 
2309 	/* On PPC, we do not need to touch the arguments. */
2310 	if (type < SLJIT_JUMP)
2311 		jump->flags |= IS_COND;
2312 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
2313 	if (type >= SLJIT_CALL)
2314 		jump->flags |= IS_CALL;
2315 #endif
2316 
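	/* The jump is emitted as a constant load into TMP_CALL_REG followed by mtctr and a
	   conditional bcctr; jump->addr records the branch so the target can be patched (or
	   the sequence shortened to a direct branch) when the code is generated. */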
2317 	PTR_FAIL_IF(emit_const(compiler, TMP_CALL_REG, 0));
2318 	PTR_FAIL_IF(push_inst(compiler, MTCTR | S(TMP_CALL_REG)));
2319 	jump->addr = compiler->size;
2320 	PTR_FAIL_IF(push_inst(compiler, BCCTR | bo_bi_flags | (type >= SLJIT_FAST_CALL ? 1 : 0)));
2321 	return jump;
2322 }
2323 
2324 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
2325 	sljit_s32 arg_types)
2326 {
2327 	CHECK_ERROR_PTR();
2328 	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
2329 
2330 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2331 	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
2332 		PTR_FAIL_IF(call_with_args(compiler, arg_types, NULL));
2333 #endif
2334 
2335 	if (type & SLJIT_CALL_RETURN) {
2336 		PTR_FAIL_IF(emit_stack_frame_release(compiler, 0));
2337 		type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
2338 	}
2339 
2340 	SLJIT_SKIP_CHECKS(compiler);
2341 	return sljit_emit_jump(compiler, type);
2342 }
2343 
2344 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
2345 {
2346 	struct sljit_jump *jump = NULL;
2347 	sljit_s32 src_r;
2348 
2349 	CHECK_ERROR();
2350 	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
2351 
2352 	if (FAST_IS_REG(src)) {
2353 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
2354 		if (type >= SLJIT_CALL && src != TMP_CALL_REG) {
2355 			FAIL_IF(push_inst(compiler, OR | S(src) | A(TMP_CALL_REG) | B(src)));
2356 			src_r = TMP_CALL_REG;
2357 		}
2358 		else
2359 			src_r = src;
2360 #else /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
2361 		src_r = src;
2362 #endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
2363 	} else if (src & SLJIT_IMM) {
2364 		/* These jumps are converted to jump/call instructions when possible. */
2365 		jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2366 		FAIL_IF(!jump);
2367 		set_jump(jump, compiler, JUMP_ADDR);
2368 		jump->u.target = (sljit_uw)srcw;
2369 
2370 #if (defined SLJIT_PASS_ENTRY_ADDR_TO_CALL && SLJIT_PASS_ENTRY_ADDR_TO_CALL)
2371 		if (type >= SLJIT_CALL)
2372 			jump->flags |= IS_CALL;
2373 #endif /* SLJIT_PASS_ENTRY_ADDR_TO_CALL */
2374 
2375 		FAIL_IF(emit_const(compiler, TMP_CALL_REG, 0));
2376 		src_r = TMP_CALL_REG;
2377 	} else {
2378 		ADJUST_LOCAL_OFFSET(src, srcw);
2379 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_CALL_REG, src, srcw, TMP_CALL_REG));
2380 		src_r = TMP_CALL_REG;
2381 	}
2382 
2383 	FAIL_IF(push_inst(compiler, MTCTR | S(src_r)));
2384 	if (jump)
2385 		jump->addr = compiler->size;
2386 	return push_inst(compiler, BCCTR | (20 << 21) | (type >= SLJIT_FAST_CALL ? 1 : 0));
2387 }
2388 
2389 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
2390 	sljit_s32 arg_types,
2391 	sljit_s32 src, sljit_sw srcw)
2392 {
2393 	CHECK_ERROR();
2394 	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
2395 
2396 	if (src & SLJIT_MEM) {
2397 		ADJUST_LOCAL_OFFSET(src, srcw);
2398 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_CALL_REG, src, srcw, TMP_CALL_REG));
2399 		src = TMP_CALL_REG;
2400 	}
2401 
2402 	if (type & SLJIT_CALL_RETURN) {
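		/* Restoring the stack frame below reloads the saved registers, so a saved register
		   used as the call target must be copied to TMP_CALL_REG first. */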
2403 		if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
2404 			FAIL_IF(push_inst(compiler, OR | S(src) | A(TMP_CALL_REG) | B(src)));
2405 			src = TMP_CALL_REG;
2406 		}
2407 
2408 		FAIL_IF(emit_stack_frame_release(compiler, 0));
2409 		type = SLJIT_JUMP;
2410 	}
2411 
2412 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2413 	if ((type & 0xff) != SLJIT_CALL_REG_ARG)
2414 		FAIL_IF(call_with_args(compiler, arg_types, &src));
2415 #endif
2416 
2417 	SLJIT_SKIP_CHECKS(compiler);
2418 	return sljit_emit_ijump(compiler, type, src, srcw);
2419 }
2420 
2421 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
2422 	sljit_s32 dst, sljit_sw dstw,
2423 	sljit_s32 type)
2424 {
2425 	sljit_s32 reg, invert;
2426 	sljit_u32 bit, from_xer;
2427 	sljit_s32 saved_op = op;
2428 	sljit_sw saved_dstw = dstw;
2429 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2430 	sljit_s32 input_flags = ((op & SLJIT_32) || op == SLJIT_MOV32) ? INT_DATA : WORD_DATA;
2431 #else
2432 	sljit_s32 input_flags = WORD_DATA;
2433 #endif
2434 
2435 	CHECK_ERROR();
2436 	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
2437 	ADJUST_LOCAL_OFFSET(dst, dstw);
2438 
2439 	op = GET_OPCODE(op);
2440 	reg = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
2441 
2442 	if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
2443 		FAIL_IF(emit_op_mem(compiler, input_flags | LOAD_DATA, TMP_REG1, dst, dstw, TMP_REG1));
2444 
2445 	invert = 0;
2446 	bit = 0;
2447 	from_xer = 0;
2448 
2449 	switch (type) {
2450 	case SLJIT_LESS:
2451 	case SLJIT_SIG_LESS:
2452 		break;
2453 
2454 	case SLJIT_GREATER_EQUAL:
2455 	case SLJIT_SIG_GREATER_EQUAL:
2456 		invert = 1;
2457 		break;
2458 
2459 	case SLJIT_GREATER:
2460 	case SLJIT_SIG_GREATER:
2461 		bit = 1;
2462 		break;
2463 
2464 	case SLJIT_LESS_EQUAL:
2465 	case SLJIT_SIG_LESS_EQUAL:
2466 		bit = 1;
2467 		invert = 1;
2468 		break;
2469 
2470 	case SLJIT_EQUAL:
2471 		bit = 2;
2472 		break;
2473 
2474 	case SLJIT_NOT_EQUAL:
2475 		bit = 2;
2476 		invert = 1;
2477 		break;
2478 
2479 	case SLJIT_OVERFLOW:
2480 		from_xer = 1;
2481 		bit = 1;
2482 		break;
2483 
2484 	case SLJIT_NOT_OVERFLOW:
2485 		from_xer = 1;
2486 		bit = 1;
2487 		invert = 1;
2488 		break;
2489 
2490 	case SLJIT_CARRY:
2491 		from_xer = 1;
2492 		bit = 2;
2493 		invert = (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_SUB) != 0;
2494 		break;
2495 
2496 	case SLJIT_NOT_CARRY:
2497 		from_xer = 1;
2498 		bit = 2;
2499 		invert = (compiler->status_flags_state & SLJIT_CURRENT_FLAGS_ADD) != 0;
2500 		break;
2501 
2502 	case SLJIT_F_LESS:
2503 	case SLJIT_ORDERED_LESS:
2504 	case SLJIT_UNORDERED_OR_LESS:
2505 		bit = 4 + 0;
2506 		break;
2507 
2508 	case SLJIT_F_GREATER_EQUAL:
2509 	case SLJIT_ORDERED_GREATER_EQUAL:
2510 	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2511 		bit = 4 + 0;
2512 		invert = 1;
2513 		break;
2514 
2515 	case SLJIT_F_GREATER:
2516 	case SLJIT_ORDERED_GREATER:
2517 	case SLJIT_UNORDERED_OR_GREATER:
2518 		bit = 4 + 1;
2519 		break;
2520 
2521 	case SLJIT_F_LESS_EQUAL:
2522 	case SLJIT_ORDERED_LESS_EQUAL:
2523 	case SLJIT_UNORDERED_OR_LESS_EQUAL:
2524 		bit = 4 + 1;
2525 		invert = 1;
2526 		break;
2527 
2528 	case SLJIT_F_EQUAL:
2529 	case SLJIT_ORDERED_EQUAL:
2530 	case SLJIT_UNORDERED_OR_EQUAL:
2531 		bit = 4 + 2;
2532 		break;
2533 
2534 	case SLJIT_F_NOT_EQUAL:
2535 	case SLJIT_ORDERED_NOT_EQUAL:
2536 	case SLJIT_UNORDERED_OR_NOT_EQUAL:
2537 		bit = 4 + 2;
2538 		invert = 1;
2539 		break;
2540 
2541 	case SLJIT_UNORDERED:
2542 		bit = 4 + 3;
2543 		break;
2544 
2545 	case SLJIT_ORDERED:
2546 		bit = 4 + 3;
2547 		invert = 1;
2548 		break;
2549 
2550 	default:
2551 		SLJIT_UNREACHABLE();
2552 		break;
2553 	}
2554 
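	/* Read the condition register (or XER for carry/overflow) into a general register and
	   extract the selected flag bit into bit 0 with rlwinm. */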
2555 	FAIL_IF(push_inst(compiler, (from_xer ? MFXER : MFCR) | D(reg)));
2556 	/* Simplified mnemonics: extrwi. */
2557 	FAIL_IF(push_inst(compiler, RLWINM | S(reg) | A(reg) | RLWI_SH(1 + bit) | RLWI_MBE(31, 31)));
2558 
2559 	if (invert)
2560 		FAIL_IF(push_inst(compiler, XORI | S(reg) | A(reg) | 0x1));
2561 
2562 	if (op < SLJIT_ADD) {
2563 		if (!(dst & SLJIT_MEM))
2564 			return SLJIT_SUCCESS;
2565 		return emit_op_mem(compiler, input_flags, reg, dst, dstw, TMP_REG1);
2566 	}
2567 
2568 	SLJIT_SKIP_CHECKS(compiler);
2569 
2570 	if (dst & SLJIT_MEM)
2571 		return sljit_emit_op2(compiler, saved_op, dst, saved_dstw, TMP_REG1, 0, TMP_REG2, 0);
2572 	return sljit_emit_op2(compiler, saved_op, dst, 0, dst, 0, TMP_REG2, 0);
2573 }
2574 
2575 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
2576 	sljit_s32 dst_reg,
2577 	sljit_s32 src, sljit_sw srcw)
2578 {
2579 	CHECK_ERROR();
2580 	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
2581 
2582 	return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2583 }
2584 
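/* EMIT_MEM_LOAD_IMM is nonzero when the offset cannot be used as a 16 bit displacement
   pair (memw and memw + sizeof(sw)) and must therefore be built in a register first; the
   64 bit variant also checks the alignment required by ld/std and the addis range. */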
2585 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
2586 
2587 #define EMIT_MEM_LOAD_IMM(inst, mem, memw) \
2588 	((sljit_s16)(memw) > SIMM_MAX - SSIZE_OF(sw))
2589 
2590 #else /* !SLJIT_CONFIG_PPC_32 */
2591 
2592 #define EMIT_MEM_LOAD_IMM(inst, mem, memw) \
2593 	((((inst) & INT_ALIGNED) && ((memw) & 0x3) != 0) \
2594 		|| ((sljit_s16)(memw) > SIMM_MAX - SSIZE_OF(sw)) \
2595 		|| ((memw) > 0x7fff7fffl || (memw) < -0x80000000l))
2596 
2597 #endif /* SLJIT_CONFIG_PPC_32 */
2598 
2599 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
2600 	sljit_s32 reg,
2601 	sljit_s32 mem, sljit_sw memw)
2602 {
2603 	sljit_ins inst;
2604 
2605 	CHECK_ERROR();
2606 	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
2607 
2608 	if (!(reg & REG_PAIR_MASK))
2609 		return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
2610 
2611 	ADJUST_LOCAL_OFFSET(mem, memw);
2612 
2613 	inst = data_transfer_insts[WORD_DATA | ((type & SLJIT_MEM_STORE) ? 0 : LOAD_DATA)];
2614 
2615 	if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
2616 		memw &= 0x3;
2617 
2618 		if (memw != 0) {
2619 			FAIL_IF(push_inst(compiler, SLWI_W(memw) | S(OFFS_REG(mem)) | A(TMP_REG1)));
2620 			FAIL_IF(push_inst(compiler, ADD | D(TMP_REG1) | A(TMP_REG1) | B(mem & REG_MASK)));
2621 		} else
2622 			FAIL_IF(push_inst(compiler, ADD | D(TMP_REG1) | A(mem & REG_MASK) | B(OFFS_REG(mem))));
2623 
2624 		mem = TMP_REG1;
2625 		memw = 0;
2626 	} else {
2627 		if (EMIT_MEM_LOAD_IMM(inst, mem, memw)) {
2628 			if ((mem & REG_MASK) != 0) {
2629 				SLJIT_SKIP_CHECKS(compiler);
2630 				FAIL_IF(sljit_emit_op2(compiler, SLJIT_ADD, TMP_REG1, 0, mem & REG_MASK, 0, SLJIT_IMM, memw));
2631 			} else
2632 				FAIL_IF(load_immediate(compiler, TMP_REG1, memw));
2633 
2634 			memw = 0;
2635 			mem = TMP_REG1;
2636 		} else if (memw > SIMM_MAX || memw < SIMM_MIN) {
2637 			FAIL_IF(push_inst(compiler, ADDIS | D(TMP_REG1) | A(mem & REG_MASK) | IMM((memw + 0x8000) >> 16)));
2638 
2639 			memw &= 0xffff;
2640 			mem = TMP_REG1;
2641 		} else {
2642 			memw &= 0xffff;
2643 			mem &= REG_MASK;
2644 		}
2645 	}
2646 
2647 	SLJIT_ASSERT((memw >= 0 && memw <= SIMM_MAX - SSIZE_OF(sw)) || (memw >= 0x8000 && memw <= 0xffff));
2648 
2649 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2650 	inst &= (sljit_ins)~INT_ALIGNED;
2651 #endif /* SLJIT_CONFIG_PPC_64 */
2652 
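	/* When the base register is also the first register of the pair, load the second word
	   first so the base is not overwritten before the second access. */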
2653 	if (!(type & SLJIT_MEM_STORE) && mem == REG_PAIR_FIRST(reg)) {
2654 		FAIL_IF(push_inst(compiler, inst | D(REG_PAIR_SECOND(reg)) | A(mem) | IMM(memw + SSIZE_OF(sw))));
2655 		return push_inst(compiler, inst | D(REG_PAIR_FIRST(reg)) | A(mem) | IMM(memw));
2656 	}
2657 
2658 	FAIL_IF(push_inst(compiler, inst | D(REG_PAIR_FIRST(reg)) | A(mem) | IMM(memw)));
2659 	return push_inst(compiler, inst | D(REG_PAIR_SECOND(reg)) | A(mem) | IMM(memw + SSIZE_OF(sw)));
2660 }
2661 
2662 #undef EMIT_MEM_LOAD_IMM
2663 
2664 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem_update(struct sljit_compiler *compiler, sljit_s32 type,
2665 	sljit_s32 reg,
2666 	sljit_s32 mem, sljit_sw memw)
2667 {
2668 	sljit_s32 mem_flags;
2669 	sljit_ins inst;
2670 
2671 	CHECK_ERROR();
2672 	CHECK(check_sljit_emit_mem_update(compiler, type, reg, mem, memw));
2673 
2674 	if (type & SLJIT_MEM_POST)
2675 		return SLJIT_ERR_UNSUPPORTED;
2676 
2677 	switch (type & 0xff) {
2678 	case SLJIT_MOV:
2679 	case SLJIT_MOV_P:
2680 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
2681 	case SLJIT_MOV_U32:
2682 	case SLJIT_MOV_S32:
2683 	case SLJIT_MOV32:
2684 #endif
2685 		mem_flags = WORD_DATA;
2686 		break;
2687 
2688 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2689 	case SLJIT_MOV_U32:
2690 	case SLJIT_MOV32:
2691 		mem_flags = INT_DATA;
2692 		break;
2693 
2694 	case SLJIT_MOV_S32:
2695 		mem_flags = INT_DATA;
2696 
2697 		if (!(type & SLJIT_MEM_STORE) && !(type & SLJIT_32)) {
2698 			if (mem & OFFS_REG_MASK)
2699 				mem_flags |= SIGNED_DATA;
2700 			else
2701 				return SLJIT_ERR_UNSUPPORTED;
2702 		}
2703 		break;
2704 #endif
2705 
2706 	case SLJIT_MOV_U8:
2707 	case SLJIT_MOV_S8:
2708 		mem_flags = BYTE_DATA;
2709 		break;
2710 
2711 	case SLJIT_MOV_U16:
2712 		mem_flags = HALF_DATA;
2713 		break;
2714 
2715 	case SLJIT_MOV_S16:
2716 		mem_flags = HALF_DATA | SIGNED_DATA;
2717 		break;
2718 
2719 	default:
2720 		SLJIT_UNREACHABLE();
2721 		mem_flags = WORD_DATA;
2722 		break;
2723 	}
2724 
2725 	if (!(type & SLJIT_MEM_STORE))
2726 		mem_flags |= LOAD_DATA;
2727 
2728 	if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
2729 		if (memw != 0)
2730 			return SLJIT_ERR_UNSUPPORTED;
2731 
2732 		if (type & SLJIT_MEM_SUPP)
2733 			return SLJIT_SUCCESS;
2734 
2735 		inst = updated_data_transfer_insts[mem_flags | INDEXED];
2736 		FAIL_IF(push_inst(compiler, INST_CODE_AND_DST(inst, 0, reg) | A(mem & REG_MASK) | B(OFFS_REG(mem))));
2737 	}
2738 	else {
2739 		if (memw > SIMM_MAX || memw < SIMM_MIN)
2740 			return SLJIT_ERR_UNSUPPORTED;
2741 
2742 		inst = updated_data_transfer_insts[mem_flags];
2743 
2744 #if (defined SLJIT_CONFIG_PPC_64 && SLJIT_CONFIG_PPC_64)
2745 		if ((inst & INT_ALIGNED) && (memw & 0x3) != 0)
2746 			return SLJIT_ERR_UNSUPPORTED;
2747 #endif
2748 
2749 		if (type & SLJIT_MEM_SUPP)
2750 			return SLJIT_SUCCESS;
2751 
2752 		FAIL_IF(push_inst(compiler, INST_CODE_AND_DST(inst, 0, reg) | A(mem & REG_MASK) | IMM(memw)));
2753 	}
2754 
2755 	if ((mem_flags & LOAD_DATA) && (type & 0xff) == SLJIT_MOV_S8)
2756 		return push_inst(compiler, EXTSB | S(reg) | A(reg));
2757 	return SLJIT_SUCCESS;
2758 }
2759 
2760 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fmem_update(struct sljit_compiler *compiler, sljit_s32 type,
2761 	sljit_s32 freg,
2762 	sljit_s32 mem, sljit_sw memw)
2763 {
2764 	sljit_s32 mem_flags;
2765 	sljit_ins inst;
2766 
2767 	CHECK_ERROR();
2768 	CHECK(check_sljit_emit_fmem_update(compiler, type, freg, mem, memw));
2769 
2770 	if (type & SLJIT_MEM_POST)
2771 		return SLJIT_ERR_UNSUPPORTED;
2772 
2773 	if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
2774 		if (memw != 0)
2775 			return SLJIT_ERR_UNSUPPORTED;
2776 	}
2777 	else {
2778 		if (memw > SIMM_MAX || memw < SIMM_MIN)
2779 			return SLJIT_ERR_UNSUPPORTED;
2780 	}
2781 
2782 	if (type & SLJIT_MEM_SUPP)
2783 		return SLJIT_SUCCESS;
2784 
2785 	mem_flags = FLOAT_DATA(type);
2786 
2787 	if (!(type & SLJIT_MEM_STORE))
2788 		mem_flags |= LOAD_DATA;
2789 
2790 	if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
2791 		inst = updated_data_transfer_insts[mem_flags | INDEXED];
2792 		return push_inst(compiler, INST_CODE_AND_DST(inst, DOUBLE_DATA, freg) | A(mem & REG_MASK) | B(OFFS_REG(mem)));
2793 	}
2794 
2795 	inst = updated_data_transfer_insts[mem_flags];
2796 	return push_inst(compiler, INST_CODE_AND_DST(inst, DOUBLE_DATA, freg) | A(mem & REG_MASK) | IMM(memw));
2797 }
2798 
2799 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
2800 {
2801 	struct sljit_const *const_;
2802 	sljit_s32 dst_r;
2803 
2804 	CHECK_ERROR_PTR();
2805 	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
2806 	ADJUST_LOCAL_OFFSET(dst, dstw);
2807 
2808 	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
2809 	PTR_FAIL_IF(!const_);
2810 	set_const(const_, compiler);
2811 
2812 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
2813 	PTR_FAIL_IF(emit_const(compiler, dst_r, init_value));
2814 
2815 	if (dst & SLJIT_MEM)
2816 		PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
2817 
2818 	return const_;
2819 }
2820 
2821 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2822 {
2823 	struct sljit_put_label *put_label;
2824 	sljit_s32 dst_r;
2825 
2826 	CHECK_ERROR_PTR();
2827 	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
2828 	ADJUST_LOCAL_OFFSET(dst, dstw);
2829 
2830 	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
2831 	PTR_FAIL_IF(!put_label);
2832 	set_put_label(put_label, compiler, 0);
2833 
2834 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
2835 #if (defined SLJIT_CONFIG_PPC_32 && SLJIT_CONFIG_PPC_32)
2836 	PTR_FAIL_IF(emit_const(compiler, dst_r, 0));
2837 #else
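	/* On 64 bit targets only space is reserved here: the placeholder word records the
	   destination register and compiler->size is bumped so that five words in total are
	   available for the address load emitted when the code is generated. */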
2838 	PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
2839 	compiler->size += 4;
2840 #endif
2841 
2842 	if (dst & SLJIT_MEM)
2843 		PTR_FAIL_IF(emit_op(compiler, SLJIT_MOV, WORD_DATA, dst, dstw, TMP_REG1, 0, TMP_REG2, 0));
2844 
2845 	return put_label;
2846 }
2847 
2848 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
2849 {
2850 	sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
2851 }
2852