1 /*
2  *    Stack-less Just-In-Time compiler
3  *
4  *    Copyright Zoltan Herczeg (hzmester@freemail.hu). All rights reserved.
5  *
6  * Redistribution and use in source and binary forms, with or without modification, are
7  * permitted provided that the following conditions are met:
8  *
9  *   1. Redistributions of source code must retain the above copyright notice, this list of
10  *      conditions and the following disclaimer.
11  *
12  *   2. Redistributions in binary form must reproduce the above copyright notice, this list
13  *      of conditions and the following disclaimer in the documentation and/or other materials
14  *      provided with the distribution.
15  *
16  * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND CONTRIBUTORS ``AS IS'' AND ANY
17  * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
18  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
19  * SHALL THE COPYRIGHT HOLDER(S) OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
20  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
21  * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
22  * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
23  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
24  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
25  */
26 
27 SLJIT_API_FUNC_ATTRIBUTE const char* sljit_get_platform_name(void)
28 {
29 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
30 	return "RISC-V-32" SLJIT_CPUINFO;
31 #else /* !SLJIT_CONFIG_RISCV_32 */
32 	return "RISC-V-64" SLJIT_CPUINFO;
33 #endif /* SLJIT_CONFIG_RISCV_32 */
34 }
35 
36 /* Length of an instruction word
37    Both for riscv-32 and riscv-64 */
38 typedef sljit_u32 sljit_ins;
39 
40 #define TMP_REG1	(SLJIT_NUMBER_OF_REGISTERS + 2)
41 #define TMP_REG2	(SLJIT_NUMBER_OF_REGISTERS + 3)
42 #define TMP_REG3	(SLJIT_NUMBER_OF_REGISTERS + 4)
43 #define TMP_ZERO	0
44 
45 /* Flags are kept in volatile registers. */
46 #define EQUAL_FLAG	(SLJIT_NUMBER_OF_REGISTERS + 5)
47 #define RETURN_ADDR_REG	TMP_REG2
48 #define OTHER_FLAG	(SLJIT_NUMBER_OF_REGISTERS + 6)
49 
50 #define TMP_FREG1	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)
51 #define TMP_FREG2	(SLJIT_NUMBER_OF_FLOAT_REGISTERS + 2)
52 
53 static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS + 7] = {
54 	0, 10, 11, 12, 13, 14, 15, 16, 17, 29, 30, 31, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 9, 8, 2, 6, 1, 7, 5, 28
55 };
56 
57 static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS + 3] = {
58 	0, 10, 11, 12, 13, 14, 15, 16, 17, 2, 3, 4, 5, 6, 7, 28, 29, 30, 31, 27, 26, 25, 24, 23, 22, 21, 20, 19, 18, 9, 8, 0, 1,
59 };
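/* The tables above translate SLJIT's virtual register indices into machine
   register numbers. Assuming the usual SLJIT numbering, SLJIT_R0..SLJIT_R2
   land in x10..x12 (a0..a2), the entry for SLJIT_SP is 2 (the machine sp),
   TMP_REG2 maps to x1 (ra) - which is why it also serves as RETURN_ADDR_REG -
   and index 0 (TMP_ZERO) is the hardwired zero register x0. */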
60 
61 /* --------------------------------------------------------------------- */
62 /*  Instruction forms                                                    */
63 /* --------------------------------------------------------------------- */
64 
65 #define RD(rd)		((sljit_ins)reg_map[rd] << 7)
66 #define RS1(rs1)	((sljit_ins)reg_map[rs1] << 15)
67 #define RS2(rs2)	((sljit_ins)reg_map[rs2] << 20)
68 #define FRD(rd)		((sljit_ins)freg_map[rd] << 7)
69 #define FRS1(rs1)	((sljit_ins)freg_map[rs1] << 15)
70 #define FRS2(rs2)	((sljit_ins)freg_map[rs2] << 20)
71 #define IMM_I(imm)	((sljit_ins)(imm) << 20)
72 #define IMM_S(imm)	((((sljit_ins)(imm) & 0xfe0) << 20) | (((sljit_ins)(imm) & 0x1f) << 7))
73 
74 /* Represents funct(i) parts of the instructions. */
75 #define OPC(o)		((sljit_ins)(o))
76 #define F3(f)		((sljit_ins)(f) << 12)
77 #define F12(f)		((sljit_ins)(f) << 20)
78 #define F7(f)		((sljit_ins)(f) << 25)
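/* These field macros place values into the standard RV32/RV64 encoding slots:
   opcode in bits [6:0], rd in [11:7], funct3 in [14:12], rs1 in [19:15],
   rs2 in [24:20] and funct7 in [31:25]. IMM_I puts a 12-bit immediate into
   [31:20]; IMM_S splits a store offset into [31:25] and [11:7]. As an
   illustration (not something the compiler emits by itself), with the default
   reg_map above the R-type instruction "add a0, a1, a2" can be composed as

       ADD | RD(SLJIT_R0) | RS1(SLJIT_R1) | RS2(SLJIT_R2)

   which yields the machine word 0x00c58533. */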
79 
80 #define ADD		(F7(0x0) | F3(0x0) | OPC(0x33))
81 #define ADDI		(F3(0x0) | OPC(0x13))
82 #define AND		(F7(0x0) | F3(0x7) | OPC(0x33))
83 #define ANDI		(F3(0x7) | OPC(0x13))
84 #define AUIPC		(OPC(0x17))
85 #define BEQ		(F3(0x0) | OPC(0x63))
86 #define BNE		(F3(0x1) | OPC(0x63))
87 #define BLT		(F3(0x4) | OPC(0x63))
88 #define BGE		(F3(0x5) | OPC(0x63))
89 #define BLTU		(F3(0x6) | OPC(0x63))
90 #define BGEU		(F3(0x7) | OPC(0x63))
91 #define DIV		(F7(0x1) | F3(0x4) | OPC(0x33))
92 #define DIVU		(F7(0x1) | F3(0x5) | OPC(0x33))
93 #define EBREAK		(F12(0x1) | F3(0x0) | OPC(0x73))
94 #define FADD_S		(F7(0x0) | F3(0x7) | OPC(0x53))
95 #define FDIV_S		(F7(0xc) | F3(0x7) | OPC(0x53))
96 #define FEQ_S		(F7(0x50) | F3(0x2) | OPC(0x53))
97 #define FLD		(F3(0x3) | OPC(0x7))
98 #define FLE_S		(F7(0x50) | F3(0x0) | OPC(0x53))
99 #define FLT_S		(F7(0x50) | F3(0x1) | OPC(0x53))
100 #define FSD		(F3(0x3) | OPC(0x27))
101 /* These conversion opcodes are only partially defined here; the remaining fields are filled in where they are used. */
102 #define FCVT_S_D	(F7(0x20) | OPC(0x53))
103 #define FCVT_S_W	(F7(0x68) | OPC(0x53))
104 #define FCVT_W_S	(F7(0x60) | F3(0x1) | OPC(0x53))
105 #define FMUL_S		(F7(0x8) | F3(0x7) | OPC(0x53))
106 #define FSGNJ_S		(F7(0x10) | F3(0x0) | OPC(0x53))
107 #define FSGNJN_S	(F7(0x10) | F3(0x1) | OPC(0x53))
108 #define FSGNJX_S	(F7(0x10) | F3(0x2) | OPC(0x53))
109 #define FSUB_S		(F7(0x4) | F3(0x7) | OPC(0x53))
110 #define JAL		(OPC(0x6f))
111 #define JALR		(F3(0x0) | OPC(0x67))
112 #define LD		(F3(0x3) | OPC(0x3))
113 #define LUI		(OPC(0x37))
114 #define LW		(F3(0x2) | OPC(0x3))
115 #define MUL		(F7(0x1) | F3(0x0) | OPC(0x33))
116 #define MULH		(F7(0x1) | F3(0x1) | OPC(0x33))
117 #define MULHU		(F7(0x1) | F3(0x3) | OPC(0x33))
118 #define OR		(F7(0x0) | F3(0x6) | OPC(0x33))
119 #define ORI		(F3(0x6) | OPC(0x13))
120 #define REM		(F7(0x1) | F3(0x6) | OPC(0x33))
121 #define REMU		(F7(0x1) | F3(0x7) | OPC(0x33))
122 #define SD		(F3(0x3) | OPC(0x23))
123 #define SLL		(F7(0x0) | F3(0x1) | OPC(0x33))
124 #define SLLI		(IMM_I(0x0) | F3(0x1) | OPC(0x13))
125 #define SLT		(F7(0x0) | F3(0x2) | OPC(0x33))
126 #define SLTI		(F3(0x2) | OPC(0x13))
127 #define SLTU		(F7(0x0) | F3(0x3) | OPC(0x33))
128 #define SLTUI		(F3(0x3) | OPC(0x13))
129 #define SRL		(F7(0x0) | F3(0x5) | OPC(0x33))
130 #define SRLI		(IMM_I(0x0) | F3(0x5) | OPC(0x13))
131 #define SRA		(F7(0x20) | F3(0x5) | OPC(0x33))
132 #define SRAI		(IMM_I(0x400) | F3(0x5) | OPC(0x13))
133 #define SUB		(F7(0x20) | F3(0x0) | OPC(0x33))
134 #define SW		(F3(0x2) | OPC(0x23))
135 #define XOR		(F7(0x0) | F3(0x4) | OPC(0x33))
136 #define XORI		(F3(0x4) | OPC(0x13))
137 
138 #define SIMM_MAX	(0x7ff)
139 #define SIMM_MIN	(-0x800)
140 #define BRANCH_MAX	(0xfff)
141 #define BRANCH_MIN	(-0x1000)
142 #define JUMP_MAX	(0xfffff)
143 #define JUMP_MIN	(-0x100000)
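/* These limits follow from the encodings: I/S-type immediates are 12-bit
   signed values (SIMM_MIN..SIMM_MAX), conditional branches encode a 13-bit
   signed byte offset with an implicit zero low bit (BRANCH_MIN..BRANCH_MAX),
   and JAL encodes a 21-bit signed offset, again with bit 0 implied
   (JUMP_MIN..JUMP_MAX). All offsets are measured from the instruction. */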
144 
145 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
146 #define S32_MAX		(0x7ffff7ffl)
147 #define S32_MIN		(-0x80000000l)
148 #define S44_MAX		(0x7fffffff7ffl)
149 #define S52_MAX		(0x7ffffffffffffl)
150 #endif
151 
152 static sljit_s32 push_inst(struct sljit_compiler *compiler, sljit_ins ins)
153 {
154 	sljit_ins *ptr = (sljit_ins*)ensure_buf(compiler, sizeof(sljit_ins));
155 	FAIL_IF(!ptr);
156 	*ptr = ins;
157 	compiler->size++;
158 	return SLJIT_SUCCESS;
159 }
160 
161 static sljit_s32 push_imm_s_inst(struct sljit_compiler *compiler, sljit_ins ins, sljit_sw imm)
162 {
163 	return push_inst(compiler, ins | IMM_S(imm));
164 }
165 
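/* Tries to shrink the worst-case sequence reserved for a jump (two words on
   RV32, up to six on RV64). If the target is close enough, the jump becomes a
   single conditional branch (PATCH_B, with the condition inverted by flipping
   the low funct3 bit) or a single JAL (PATCH_J); on 64-bit targets,
   PATCH_REL32/ABS32/ABS44/ABS52 select progressively longer address
   materializations. The returned pointer marks the last instruction word that
   stays in use, so the caller can skip the remaining reserved words. */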
166 static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code, sljit_sw executable_offset)
167 {
168 	sljit_sw diff;
169 	sljit_uw target_addr;
170 	sljit_ins *inst;
171 
172 	inst = (sljit_ins *)jump->addr;
173 
174 	if (jump->flags & SLJIT_REWRITABLE_JUMP)
175 		goto exit;
176 
177 	if (jump->flags & JUMP_ADDR)
178 		target_addr = jump->u.target;
179 	else {
180 		SLJIT_ASSERT(jump->flags & JUMP_LABEL);
181 		target_addr = (sljit_uw)(code + jump->u.label->size) + (sljit_uw)executable_offset;
182 	}
183 
184 	diff = (sljit_sw)target_addr - (sljit_sw)inst - executable_offset;
185 
186 	if (jump->flags & IS_COND) {
187 		inst--;
188 		diff += SSIZE_OF(ins);
189 
190 		if (diff >= BRANCH_MIN && diff <= BRANCH_MAX) {
191 			jump->flags |= PATCH_B;
192 			inst[0] = (inst[0] & 0x1fff07f) ^ 0x1000;
193 			jump->addr = (sljit_uw)inst;
194 			return inst;
195 		}
196 
197 		inst++;
198 		diff -= SSIZE_OF(ins);
199 	}
200 
201 	if (diff >= JUMP_MIN && diff <= JUMP_MAX) {
202 		if (jump->flags & IS_COND) {
203 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
204 			inst[-1] -= (sljit_ins)(1 * sizeof(sljit_ins)) << 7;
205 #else
206 			inst[-1] -= (sljit_ins)(5 * sizeof(sljit_ins)) << 7;
207 #endif
208 		}
209 
210 		jump->flags |= PATCH_J;
211 		return inst;
212 	}
213 
214 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
215 	if (diff >= S32_MIN && diff <= S32_MAX) {
216 		if (jump->flags & IS_COND)
217 			inst[-1] -= (sljit_ins)(4 * sizeof(sljit_ins)) << 7;
218 
219 		jump->flags |= PATCH_REL32;
220 		inst[1] = inst[0];
221 		return inst + 1;
222 	}
223 
224 	if (target_addr <= (sljit_uw)S32_MAX) {
225 		if (jump->flags & IS_COND)
226 			inst[-1] -= (sljit_ins)(4 * sizeof(sljit_ins)) << 7;
227 
228 		jump->flags |= PATCH_ABS32;
229 		inst[1] = inst[0];
230 		return inst + 1;
231 	}
232 
233 	if (target_addr <= S44_MAX) {
234 		if (jump->flags & IS_COND)
235 			inst[-1] -= (sljit_ins)(2 * sizeof(sljit_ins)) << 7;
236 
237 		jump->flags |= PATCH_ABS44;
238 		inst[3] = inst[0];
239 		return inst + 3;
240 	}
241 
242 	if (target_addr <= S52_MAX) {
243 		if (jump->flags & IS_COND)
244 			inst[-1] -= (sljit_ins)(1 * sizeof(sljit_ins)) << 7;
245 
246 		jump->flags |= PATCH_ABS52;
247 		inst[4] = inst[0];
248 		return inst + 4;
249 	}
250 #endif
251 
252 exit:
253 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
254 	inst[1] = inst[0];
255 	return inst + 1;
256 #else
257 	inst[5] = inst[0];
258 	return inst + 5;
259 #endif
260 }
261 
262 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
263 
264 static SLJIT_INLINE sljit_sw put_label_get_length(struct sljit_put_label *put_label, sljit_uw max_label)
265 {
266 	if (max_label <= (sljit_uw)S32_MAX) {
267 		put_label->flags = PATCH_ABS32;
268 		return 1;
269 	}
270 
271 	if (max_label <= S44_MAX) {
272 		put_label->flags = PATCH_ABS44;
273 		return 3;
274 	}
275 
276 	if (max_label <= S52_MAX) {
277 		put_label->flags = PATCH_ABS52;
278 		return 4;
279 	}
280 
281 	put_label->flags = 0;
282 	return 5;
283 }
284 
285 #endif /* SLJIT_CONFIG_RISCV_64 */
286 
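/* Patches a previously reserved instruction sequence so it loads the final
   absolute address into a register. On RV32 this is a LUI plus ADDI/JALR
   pair. On RV64 the sequence depends on the PATCH_ABS32/44/52 flags chosen
   earlier: increasingly long LUI/ADDI/SLLI combinations build the upper bits
   (using TMP_REG3 for the high part in the 52/64-bit cases) and a final
   ADDI/JALR supplies the low 12 bits. The 0x800 adjustment compensates for
   the sign extension of that low 12-bit part. */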
287 static SLJIT_INLINE void load_addr_to_reg(void *dst, sljit_u32 reg)
288 {
289 	struct sljit_jump *jump = NULL;
290 	struct sljit_put_label *put_label;
291 	sljit_uw flags;
292 	sljit_ins *inst;
293 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
294 	sljit_sw high;
295 #endif
296 	sljit_uw addr;
297 
298 	if (reg != 0) {
299 		jump = (struct sljit_jump*)dst;
300 		flags = jump->flags;
301 		inst = (sljit_ins*)jump->addr;
302 		addr = (flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
303 	} else {
304 		put_label = (struct sljit_put_label*)dst;
305 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
306 		flags = put_label->flags;
307 #endif
308 		inst = (sljit_ins*)put_label->addr;
309 		addr = put_label->label->addr;
310 		reg = *inst;
311 	}
312 
313 	if ((addr & 0x800) != 0)
314 		addr += 0x1000;
315 
316 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
317 	inst[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
318 #else /* !SLJIT_CONFIG_RISCV_32 */
319 
320 	if (flags & PATCH_ABS32) {
321 		SLJIT_ASSERT(addr <= S32_MAX);
322 		inst[0] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
323 	} else if (flags & PATCH_ABS44) {
324 		high = (sljit_sw)addr >> 12;
325 		SLJIT_ASSERT((sljit_uw)high <= 0x7fffffff);
326 
327 		if (high > S32_MAX) {
328 			SLJIT_ASSERT((high & 0x800) != 0);
329 			inst[0] = LUI | RD(reg) | (sljit_ins)0x80000000u;
330 			inst[1] = XORI | RD(reg) | RS1(reg) | IMM_I(high);
331 		} else {
332 			if ((high & 0x800) != 0)
333 				high += 0x1000;
334 
335 			inst[0] = LUI | RD(reg) | (sljit_ins)(high & ~0xfff);
336 			inst[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(high);
337 		}
338 
339 		inst[2] = SLLI | RD(reg) | RS1(reg) | IMM_I(12);
340 		inst += 2;
341 	} else {
342 		high = (sljit_sw)addr >> 32;
343 
344 		if ((addr & 0x80000000l) != 0)
345 			high = ~high;
346 
347 		if ((high & 0x800) != 0)
348 			high += 0x1000;
349 
350 		if (flags & PATCH_ABS52) {
351 			SLJIT_ASSERT(addr <= S52_MAX);
352 			inst[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high << 12);
353 		} else {
354 			inst[0] = LUI | RD(TMP_REG3) | (sljit_ins)(high & ~0xfff);
355 			inst[1] = ADDI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I(high);
356 			inst++;
357 		}
358 
359 		inst[1] = LUI | RD(reg) | (sljit_ins)((sljit_sw)addr & ~0xfff);
360 		inst[2] = SLLI | RD(TMP_REG3) | RS1(TMP_REG3) | IMM_I((flags & PATCH_ABS52) ? 20 : 32);
361 		inst[3] = XOR | RD(reg) | RS1(reg) | RS2(TMP_REG3);
362 		inst += 3;
363 	}
364 #endif /* !SLJIT_CONFIG_RISCV_32 */
365 
366 	if (jump != NULL) {
367 		SLJIT_ASSERT((inst[1] & 0x707f) == JALR);
368 		inst[1] = (inst[1] & 0xfffff) | IMM_I(addr);
369 	} else
370 		inst[1] = ADDI | RD(reg) | RS1(reg) | IMM_I(addr);
371 }
372 
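/* Second pass of code generation: the buffered instruction words are copied
   into executable memory, label/const/put_label addresses are resolved, and
   each jump is first size-optimized by detect_jump_type() and then patched
   with its final branch/JAL offset or absolute address. */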
373 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(struct sljit_compiler *compiler)
374 {
375 	struct sljit_memory_fragment *buf;
376 	sljit_ins *code;
377 	sljit_ins *code_ptr;
378 	sljit_ins *buf_ptr;
379 	sljit_ins *buf_end;
380 	sljit_uw word_count;
381 	sljit_uw next_addr;
382 	sljit_sw executable_offset;
383 	sljit_uw addr;
384 
385 	struct sljit_label *label;
386 	struct sljit_jump *jump;
387 	struct sljit_const *const_;
388 	struct sljit_put_label *put_label;
389 
390 	CHECK_ERROR_PTR();
391 	CHECK_PTR(check_sljit_generate_code(compiler));
392 	reverse_buf(compiler);
393 
394 	code = (sljit_ins*)SLJIT_MALLOC_EXEC(compiler->size * sizeof(sljit_ins), compiler->exec_allocator_data);
395 	PTR_FAIL_WITH_EXEC_IF(code);
396 	buf = compiler->buf;
397 
398 	code_ptr = code;
399 	word_count = 0;
400 	next_addr = 0;
401 	executable_offset = SLJIT_EXEC_OFFSET(code);
402 
403 	label = compiler->labels;
404 	jump = compiler->jumps;
405 	const_ = compiler->consts;
406 	put_label = compiler->put_labels;
407 
408 	do {
409 		buf_ptr = (sljit_ins*)buf->memory;
410 		buf_end = buf_ptr + (buf->used_size >> 2);
411 		do {
412 			*code_ptr = *buf_ptr++;
413 			if (next_addr == word_count) {
414 				SLJIT_ASSERT(!label || label->size >= word_count);
415 				SLJIT_ASSERT(!jump || jump->addr >= word_count);
416 				SLJIT_ASSERT(!const_ || const_->addr >= word_count);
417 				SLJIT_ASSERT(!put_label || put_label->addr >= word_count);
418 
419 				/* These structures are ordered by their address. */
420 				if (label && label->size == word_count) {
421 					label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
422 					label->size = (sljit_uw)(code_ptr - code);
423 					label = label->next;
424 				}
425 				if (jump && jump->addr == word_count) {
426 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
427 					word_count += 1;
428 #else
429 					word_count += 5;
430 #endif
431 					jump->addr = (sljit_uw)code_ptr;
432 					code_ptr = detect_jump_type(jump, code, executable_offset);
433 					jump = jump->next;
434 				}
435 				if (const_ && const_->addr == word_count) {
436 					const_->addr = (sljit_uw)code_ptr;
437 					const_ = const_->next;
438 				}
439 				if (put_label && put_label->addr == word_count) {
440 					SLJIT_ASSERT(put_label->label);
441 					put_label->addr = (sljit_uw)code_ptr;
442 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
443 					code_ptr += 1;
444 					word_count += 1;
445 #else
446 					code_ptr += put_label_get_length(put_label, (sljit_uw)(SLJIT_ADD_EXEC_OFFSET(code, executable_offset) + put_label->label->size));
447 					word_count += 5;
448 #endif
449 					put_label = put_label->next;
450 				}
451 				next_addr = compute_next_addr(label, jump, const_, put_label);
452 			}
453 			code_ptr++;
454 			word_count++;
455 		} while (buf_ptr < buf_end);
456 
457 		buf = buf->next;
458 	} while (buf);
459 
460 	if (label && label->size == word_count) {
461 		label->addr = (sljit_uw)code_ptr;
462 		label->size = (sljit_uw)(code_ptr - code);
463 		label = label->next;
464 	}
465 
466 	SLJIT_ASSERT(!label);
467 	SLJIT_ASSERT(!jump);
468 	SLJIT_ASSERT(!const_);
469 	SLJIT_ASSERT(!put_label);
470 	SLJIT_ASSERT(code_ptr - code <= (sljit_sw)compiler->size);
471 
472 	jump = compiler->jumps;
473 	while (jump) {
474 		do {
475 			if (!(jump->flags & (PATCH_B | PATCH_J | PATCH_REL32))) {
476 				load_addr_to_reg(jump, TMP_REG1);
477 				break;
478 			}
479 
480 			addr = (jump->flags & JUMP_LABEL) ? jump->u.label->addr : jump->u.target;
481 			buf_ptr = (sljit_ins *)jump->addr;
482 			addr -= (sljit_uw)SLJIT_ADD_EXEC_OFFSET(buf_ptr, executable_offset);
483 
484 			if (jump->flags & PATCH_B) {
485 				SLJIT_ASSERT((sljit_sw)addr >= BRANCH_MIN && (sljit_sw)addr <= BRANCH_MAX);
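				/* Scatter the offset into the B-type immediate fields:
				   imm[11] -> bit 7, imm[4:1] -> bits 11:8,
				   imm[10:5] -> bits 30:25, imm[12] -> bit 31. */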
486 				addr = ((addr & 0x800) >> 4) | ((addr & 0x1e) << 7) | ((addr & 0x7e0) << 20) | ((addr & 0x1000) << 19);
487 				buf_ptr[0] |= (sljit_ins)addr;
488 				break;
489 			}
490 
491 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
492 			if (jump->flags & PATCH_REL32) {
493 				SLJIT_ASSERT((sljit_sw)addr >= S32_MIN && (sljit_sw)addr <= S32_MAX);
494 
495 				if ((addr & 0x800) != 0)
496 					addr += 0x1000;
497 
498 				buf_ptr[0] = AUIPC | RD(TMP_REG1) | (sljit_ins)((sljit_sw)addr & ~0xfff);
499 				SLJIT_ASSERT((buf_ptr[1] & 0x707f) == JALR);
500 				buf_ptr[1] |= IMM_I(addr);
501 				break;
502 			}
503 #endif
504 
505 			SLJIT_ASSERT((sljit_sw)addr >= JUMP_MIN && (sljit_sw)addr <= JUMP_MAX);
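			/* Scatter the offset into the J-type (JAL) immediate fields:
			   imm[19:12] stay in bits 19:12, imm[11] -> bit 20,
			   imm[10:1] -> bits 30:21, imm[20] -> bit 31. */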
506 			addr = (addr & 0xff000) | ((addr & 0x800) << 9) | ((addr & 0x7fe) << 20) | ((addr & 0x100000) << 11);
507 			buf_ptr[0] = JAL | RD((jump->flags & IS_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | (sljit_ins)addr;
508 		} while (0);
509 		jump = jump->next;
510 	}
511 
512 	put_label = compiler->put_labels;
513 	while (put_label) {
514 		load_addr_to_reg(put_label, 0);
515 		put_label = put_label->next;
516 	}
517 
518 	compiler->error = SLJIT_ERR_COMPILED;
519 	compiler->executable_offset = executable_offset;
520 	compiler->executable_size = (sljit_uw)(code_ptr - code) * sizeof(sljit_ins);
521 
522 	code = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code, executable_offset);
523 	code_ptr = (sljit_ins *)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
524 
525 	SLJIT_CACHE_FLUSH(code, code_ptr);
526 	SLJIT_UPDATE_WX_FLAGS(code, code_ptr, 1);
527 	return code;
528 }
529 
530 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)
531 {
532 	switch (feature_type) {
533 	case SLJIT_HAS_FPU:
534 	case SLJIT_HAS_ZERO_REGISTER:
535 		return 1;
536 	default:
537 		return 0;
538 	}
539 }
540 
541 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)
542 {
543 	return (type >= SLJIT_ORDERED_EQUAL && type <= SLJIT_ORDERED_LESS_EQUAL);
544 }
545 
546 /* --------------------------------------------------------------------- */
547 /*  Entry, exit                                                          */
548 /* --------------------------------------------------------------------- */
549 
550 /* Creates an index in data_transfer_insts array. */
551 #define LOAD_DATA	0x01
552 #define WORD_DATA	0x00
553 #define BYTE_DATA	0x02
554 #define HALF_DATA	0x04
555 #define INT_DATA	0x06
556 #define SIGNED_DATA	0x08
557 /* Separates integer and floating point registers */
558 #define GPR_REG		0x0f
559 #define DOUBLE_DATA	0x10
560 #define SINGLE_DATA	0x12
561 
562 #define MEM_MASK	0x1f
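/* The load/store emitters below combine these bits into an index into the
   data_transfer_insts[] table. For example, BYTE_DATA | LOAD_DATA selects
   lbu, HALF_DATA | SIGNED_DATA | LOAD_DATA selects lh, and DOUBLE_DATA
   (with or without LOAD_DATA) selects fsd/fld. */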
563 
564 #define ARG_TEST	0x00020
565 #define ALT_KEEP_CACHE	0x00040
566 #define CUMULATIVE_OP	0x00080
567 #define IMM_OP		0x00100
568 #define MOVE_OP		0x00200
569 #define SRC2_IMM	0x00400
570 
571 #define UNUSED_DEST	0x00800
572 #define REG_DEST	0x01000
573 #define REG1_SOURCE	0x02000
574 #define REG2_SOURCE	0x04000
575 #define SLOW_SRC1	0x08000
576 #define SLOW_SRC2	0x10000
577 #define SLOW_DEST	0x20000
578 
579 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
580 #define STACK_STORE	SW
581 #define STACK_LOAD	LW
582 #else
583 #define STACK_STORE	SD
584 #define STACK_LOAD	LD
585 #endif
586 
587 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
588 #include "sljitNativeRISCV_32.c"
589 #else
590 #include "sljitNativeRISCV_64.c"
591 #endif
592 
593 #define STACK_MAX_DISTANCE (-SIMM_MIN)
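/* Stack adjustments up to this size (2048 bytes) fit into a single ADDI and
   keep every register save offset within the 12-bit signed immediate range. */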
594 
595 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw);
596 
597 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_enter(struct sljit_compiler *compiler,
598 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
599 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
600 {
601 	sljit_s32 i, tmp, offset;
602 	sljit_s32 saved_arg_count = SLJIT_KEPT_SAVEDS_COUNT(options);
603 
604 	CHECK_ERROR();
605 	CHECK(check_sljit_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
606 	set_emit_enter(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
607 
608 	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - saved_arg_count, 1);
609 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
610 	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
611 		if ((local_size & SSIZE_OF(sw)) != 0)
612 			local_size += SSIZE_OF(sw);
613 		local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
614 	}
615 #else
616 	local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
617 #endif
618 	local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
619 	compiler->local_size = local_size;
620 
621 	if (local_size <= STACK_MAX_DISTANCE) {
622 		/* Frequent case. */
623 		FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(-local_size)));
624 		offset = local_size - SSIZE_OF(sw);
625 		local_size = 0;
626 	} else {
627 		FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(STACK_MAX_DISTANCE)));
628 		local_size -= STACK_MAX_DISTANCE;
629 
630 		if (local_size > STACK_MAX_DISTANCE)
631 			FAIL_IF(load_immediate(compiler, TMP_REG1, local_size, TMP_REG3));
632 		offset = STACK_MAX_DISTANCE - SSIZE_OF(sw);
633 	}
634 
635 	FAIL_IF(push_imm_s_inst(compiler, STACK_STORE | RS1(SLJIT_SP) | RS2(RETURN_ADDR_REG), offset));
636 
637 	tmp = SLJIT_S0 - saveds;
638 	for (i = SLJIT_S0 - saved_arg_count; i > tmp; i--) {
639 		offset -= SSIZE_OF(sw);
640 		FAIL_IF(push_imm_s_inst(compiler, STACK_STORE | RS1(SLJIT_SP) | RS2(i), offset));
641 	}
642 
643 	for (i = scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
644 		offset -= SSIZE_OF(sw);
645 		FAIL_IF(push_imm_s_inst(compiler, STACK_STORE | RS1(SLJIT_SP) | RS2(i), offset));
646 	}
647 
648 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
649 	/* This alignment is valid because offset is not used after storing FPU regs. */
650 	if ((offset & SSIZE_OF(sw)) != 0)
651 		offset -= SSIZE_OF(sw);
652 #endif
653 
654 	tmp = SLJIT_FS0 - fsaveds;
655 	for (i = SLJIT_FS0; i > tmp; i--) {
656 		offset -= SSIZE_OF(f64);
657 		FAIL_IF(push_imm_s_inst(compiler, FSD | RS1(SLJIT_SP) | FRS2(i), offset));
658 	}
659 
660 	for (i = fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
661 		offset -= SSIZE_OF(f64);
662 		FAIL_IF(push_imm_s_inst(compiler, FSD | RS1(SLJIT_SP) | FRS2(i), offset));
663 	}
664 
665 	if (local_size > STACK_MAX_DISTANCE)
666 		FAIL_IF(push_inst(compiler, SUB | RD(SLJIT_SP) | RS1(SLJIT_SP) | RS2(TMP_REG1)));
667 	else if (local_size > 0)
668 		FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(-local_size)));
669 
670 	if (options & SLJIT_ENTER_REG_ARG)
671 		return SLJIT_SUCCESS;
672 
673 	arg_types >>= SLJIT_ARG_SHIFT;
674 	saved_arg_count = 0;
675 	tmp = SLJIT_R0;
676 
677 	while (arg_types > 0) {
678 		if ((arg_types & SLJIT_ARG_MASK) < SLJIT_ARG_TYPE_F64) {
679 			if (!(arg_types & SLJIT_ARG_TYPE_SCRATCH_REG)) {
680 				FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_S0 - saved_arg_count) | RS1(tmp) | IMM_I(0)));
681 				saved_arg_count++;
682 			}
683 			tmp++;
684 		}
685 
686 		arg_types >>= SLJIT_ARG_SHIFT;
687 	}
688 
689 	return SLJIT_SUCCESS;
690 }
691 
692 #undef STACK_MAX_DISTANCE
693 
694 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_set_context(struct sljit_compiler *compiler,
695 	sljit_s32 options, sljit_s32 arg_types, sljit_s32 scratches, sljit_s32 saveds,
696 	sljit_s32 fscratches, sljit_s32 fsaveds, sljit_s32 local_size)
697 {
698 	CHECK_ERROR();
699 	CHECK(check_sljit_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size));
700 	set_set_context(compiler, options, arg_types, scratches, saveds, fscratches, fsaveds, local_size);
701 
702 	local_size += GET_SAVED_REGISTERS_SIZE(scratches, saveds - SLJIT_KEPT_SAVEDS_COUNT(options), 1);
703 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
704 	if (fsaveds > 0 || fscratches >= SLJIT_FIRST_SAVED_FLOAT_REG) {
705 		if ((local_size & SSIZE_OF(sw)) != 0)
706 			local_size += SSIZE_OF(sw);
707 		local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
708 	}
709 #else
710 	local_size += GET_SAVED_FLOAT_REGISTERS_SIZE(fscratches, fsaveds, sizeof(sljit_f64));
711 #endif
712 	compiler->local_size = (local_size + SLJIT_LOCALS_OFFSET + 15) & ~0xf;
713 
714 	return SLJIT_SUCCESS;
715 }
716 
717 #define STACK_MAX_DISTANCE (-SIMM_MIN - 16)
718 
719 static sljit_s32 emit_stack_frame_release(struct sljit_compiler *compiler, sljit_s32 is_return_to)
720 {
721 	sljit_s32 i, tmp, offset;
722 	sljit_s32 local_size = compiler->local_size;
723 
724 	if (local_size > STACK_MAX_DISTANCE) {
725 		local_size -= STACK_MAX_DISTANCE;
726 
727 		if (local_size > STACK_MAX_DISTANCE) {
728 			FAIL_IF(load_immediate(compiler, TMP_REG2, local_size, TMP_REG3));
729 			FAIL_IF(push_inst(compiler, ADD | RD(SLJIT_SP) | RS1(SLJIT_SP) | RS2(TMP_REG2)));
730 		} else
731 			FAIL_IF(push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(local_size)));
732 
733 		local_size = STACK_MAX_DISTANCE;
734 	}
735 
736 	SLJIT_ASSERT(local_size > 0);
737 
738 	offset = local_size - SSIZE_OF(sw);
739 	if (!is_return_to)
740 		FAIL_IF(push_inst(compiler, STACK_LOAD | RD(RETURN_ADDR_REG) | RS1(SLJIT_SP) | IMM_I(offset)));
741 
742 	tmp = SLJIT_S0 - compiler->saveds;
743 	for (i = SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options); i > tmp; i--) {
744 		offset -= SSIZE_OF(sw);
745 		FAIL_IF(push_inst(compiler, STACK_LOAD | RD(i) | RS1(SLJIT_SP) | IMM_I(offset)));
746 	}
747 
748 	for (i = compiler->scratches; i >= SLJIT_FIRST_SAVED_REG; i--) {
749 		offset -= SSIZE_OF(sw);
750 		FAIL_IF(push_inst(compiler, STACK_LOAD | RD(i) | RS1(SLJIT_SP) | IMM_I(offset)));
751 	}
752 
753 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
754 	/* This alignment is valid because offset is not used after restoring the FPU regs. */
755 	if ((offset & SSIZE_OF(sw)) != 0)
756 		offset -= SSIZE_OF(sw);
757 #endif
758 
759 	tmp = SLJIT_FS0 - compiler->fsaveds;
760 	for (i = SLJIT_FS0; i > tmp; i--) {
761 		offset -= SSIZE_OF(f64);
762 		FAIL_IF(push_inst(compiler, FLD | FRD(i) | RS1(SLJIT_SP) | IMM_I(offset)));
763 	}
764 
765 	for (i = compiler->fscratches; i >= SLJIT_FIRST_SAVED_FLOAT_REG; i--) {
766 		offset -= SSIZE_OF(f64);
767 		FAIL_IF(push_inst(compiler, FLD | FRD(i) | RS1(SLJIT_SP) | IMM_I(offset)));
768 	}
769 
770 	return push_inst(compiler, ADDI | RD(SLJIT_SP) | RS1(SLJIT_SP) | IMM_I(local_size));
771 }
772 
773 #undef STACK_MAX_DISTANCE
774 
775 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_void(struct sljit_compiler *compiler)
776 {
777 	CHECK_ERROR();
778 	CHECK(check_sljit_emit_return_void(compiler));
779 
780 	FAIL_IF(emit_stack_frame_release(compiler, 0));
781 	return push_inst(compiler, JALR | RD(TMP_ZERO) | RS1(RETURN_ADDR_REG) | IMM_I(0));
782 }
783 
784 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_return_to(struct sljit_compiler *compiler,
785 	sljit_s32 src, sljit_sw srcw)
786 {
787 	CHECK_ERROR();
788 	CHECK(check_sljit_emit_return_to(compiler, src, srcw));
789 
790 	if (src & SLJIT_MEM) {
791 		ADJUST_LOCAL_OFFSET(src, srcw);
792 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
793 		src = TMP_REG1;
794 		srcw = 0;
795 	} else if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
796 		FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(src) | IMM_I(0)));
797 		src = TMP_REG1;
798 		srcw = 0;
799 	}
800 
801 	FAIL_IF(emit_stack_frame_release(compiler, 1));
802 
803 	SLJIT_SKIP_CHECKS(compiler);
804 	return sljit_emit_ijump(compiler, SLJIT_JUMP, src, srcw);
805 }
806 
807 /* --------------------------------------------------------------------- */
808 /*  Operators                                                            */
809 /* --------------------------------------------------------------------- */
810 
811 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
812 #define ARCH_32_64(a, b)	a
813 #else
814 #define ARCH_32_64(a, b)	b
815 #endif
816 
817 static const sljit_ins data_transfer_insts[16 + 4] = {
818 /* u w s */ ARCH_32_64(F3(0x2) | OPC(0x23) /* sw */, F3(0x3) | OPC(0x23) /* sd */),
819 /* u w l */ ARCH_32_64(F3(0x2) | OPC(0x3) /* lw */, F3(0x3) | OPC(0x3) /* ld */),
820 /* u b s */ F3(0x0) | OPC(0x23) /* sb */,
821 /* u b l */ F3(0x4) | OPC(0x3) /* lbu */,
822 /* u h s */ F3(0x1) | OPC(0x23) /* sh */,
823 /* u h l */ F3(0x5) | OPC(0x3) /* lhu */,
824 /* u i s */ F3(0x2) | OPC(0x23) /* sw */,
825 /* u i l */ ARCH_32_64(F3(0x2) | OPC(0x3) /* lw */, F3(0x6) | OPC(0x3) /* lwu */),
826 
827 /* s w s */ ARCH_32_64(F3(0x2) | OPC(0x23) /* sw */, F3(0x3) | OPC(0x23) /* sd */),
828 /* s w l */ ARCH_32_64(F3(0x2) | OPC(0x3) /* lw */, F3(0x3) | OPC(0x3) /* ld */),
829 /* s b s */ F3(0x0) | OPC(0x23) /* sb */,
830 /* s b l */ F3(0x0) | OPC(0x3) /* lb */,
831 /* s h s */ F3(0x1) | OPC(0x23) /* sh */,
832 /* s h l */ F3(0x1) | OPC(0x3) /* lh */,
833 /* s i s */ F3(0x2) | OPC(0x23) /* sw */,
834 /* s i l */ F3(0x2) | OPC(0x3) /* lw */,
835 
836 /* d   s */ F3(0x3) | OPC(0x27) /* fsd */,
837 /* d   l */ F3(0x3) | OPC(0x7) /* fld */,
838 /* s   s */ F3(0x2) | OPC(0x27) /* fsw */,
839 /* s   l */ F3(0x2) | OPC(0x7) /* flw */,
840 };
841 
842 #undef ARCH_32_64
843 
844 static sljit_s32 push_mem_inst(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 base, sljit_sw offset)
845 {
846 	sljit_ins ins;
847 
848 	SLJIT_ASSERT(FAST_IS_REG(base) && offset <= 0xfff && offset >= SIMM_MIN);
849 
850 	ins = data_transfer_insts[flags & MEM_MASK] | RS1(base);
851 	if (flags & LOAD_DATA)
852 		ins |= ((flags & MEM_MASK) <= GPR_REG ? RD(reg) : FRD(reg)) | IMM_I(offset);
853 	else
854 		ins |= ((flags & MEM_MASK) <= GPR_REG ? RS2(reg) : FRS2(reg)) | IMM_S(offset);
855 
856 	return push_inst(compiler, ins);
857 }
858 
859 /* Can perform an operation using at most 1 instruction. */
860 static sljit_s32 getput_arg_fast(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
861 {
862 
863 	SLJIT_ASSERT(arg & SLJIT_MEM);
864 
865 	if (!(arg & OFFS_REG_MASK) && argw <= SIMM_MAX && argw >= SIMM_MIN) {
866 		/* Works for both absolute and relative addresses. */
867 		if (SLJIT_UNLIKELY(flags & ARG_TEST))
868 			return 1;
869 
870 		FAIL_IF(push_mem_inst(compiler, flags, reg, arg & REG_MASK, argw));
871 		return -1;
872 	}
873 	return 0;
874 }
875 
876 #define TO_ARGW_HI(argw) (((argw) & ~0xfff) + (((argw) & 0x800) ? 0x1000 : 0))
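/* Rounds argw to the surrounding 4 KiB "page" so that the remaining low
   12 bits can be added back by a sign-extended I/S-type immediate. E.g. for
   argw = 0x12fff, TO_ARGW_HI() gives 0x13000 and the displacement is -1. */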
877 
878 /* See getput_arg below.
879    Note: can_cache is called only for binary operators. */
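/* getput_arg keeps a one-entry cache in compiler->cache_arg/cache_argw:
   whenever an address part (base plus offset, scaled index, or a large
   constant) has to be materialized, it is placed in TMP_REG3 and remembered,
   so a following memory operand with the same base/offset pattern can reuse
   TMP_REG3 instead of recomputing it. can_cache() only predicts whether such
   reuse will be possible for the next argument. */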
880 static sljit_s32 can_cache(sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
881 {
882 	SLJIT_ASSERT((arg & SLJIT_MEM) && (next_arg & SLJIT_MEM));
883 
884 	/* Simple operation except for updates. */
885 	if (arg & OFFS_REG_MASK) {
886 		argw &= 0x3;
887 		next_argw &= 0x3;
888 		if (argw && argw == next_argw && (arg == next_arg || (arg & OFFS_REG_MASK) == (next_arg & OFFS_REG_MASK)))
889 			return 1;
890 		return 0;
891 	}
892 
893 	if (arg == next_arg) {
894 		if (((next_argw - argw) <= SIMM_MAX && (next_argw - argw) >= SIMM_MIN)
895 				|| TO_ARGW_HI(argw) == TO_ARGW_HI(next_argw))
896 			return 1;
897 		return 0;
898 	}
899 
900 	return 0;
901 }
902 
903 /* Emit the necessary instructions. See can_cache above. */
904 static sljit_s32 getput_arg(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw, sljit_s32 next_arg, sljit_sw next_argw)
905 {
906 	sljit_s32 base = arg & REG_MASK;
907 	sljit_s32 tmp_r = TMP_REG1;
908 	sljit_sw offset, argw_hi;
909 
910 	SLJIT_ASSERT(arg & SLJIT_MEM);
911 	if (!(next_arg & SLJIT_MEM)) {
912 		next_arg = 0;
913 		next_argw = 0;
914 	}
915 
916 	/* Since tmp can be the same as base or offset registers,
917 	 * these might be unavailable after modifying tmp. */
918 	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA))
919 		tmp_r = reg;
920 
921 	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
922 		argw &= 0x3;
923 
924 		/* Using the cache. */
925 		if (argw == compiler->cache_argw) {
926 			if (arg == compiler->cache_arg)
927 				return push_mem_inst(compiler, flags, reg, TMP_REG3, 0);
928 
929 			if ((SLJIT_MEM | (arg & OFFS_REG_MASK)) == compiler->cache_arg) {
930 				if (arg == next_arg && argw == (next_argw & 0x3)) {
931 					compiler->cache_arg = arg;
932 					compiler->cache_argw = argw;
933 					FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG3) | RS1(TMP_REG3) | RS2(base)));
934 					return push_mem_inst(compiler, flags, reg, TMP_REG3, 0);
935 				}
936 				FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(base) | RS2(TMP_REG3)));
937 				return push_mem_inst(compiler, flags, reg, tmp_r, 0);
938 			}
939 		}
940 
941 		if (SLJIT_UNLIKELY(argw)) {
942 			compiler->cache_arg = SLJIT_MEM | (arg & OFFS_REG_MASK);
943 			compiler->cache_argw = argw;
944 			FAIL_IF(push_inst(compiler, SLLI | RD(TMP_REG3) | RS1(OFFS_REG(arg)) | IMM_I(argw)));
945 		}
946 
947 		if (arg == next_arg && argw == (next_argw & 0x3)) {
948 			compiler->cache_arg = arg;
949 			compiler->cache_argw = argw;
950 			FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG3) | RS1(base) | RS2(!argw ? OFFS_REG(arg) : TMP_REG3)));
951 			tmp_r = TMP_REG3;
952 		}
953 		else
954 			FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(base) | RS2(!argw ? OFFS_REG(arg) : TMP_REG3)));
955 		return push_mem_inst(compiler, flags, reg, tmp_r, 0);
956 	}
957 
958 	if (compiler->cache_arg == arg && argw - compiler->cache_argw <= SIMM_MAX && argw - compiler->cache_argw >= SIMM_MIN)
959 		return push_mem_inst(compiler, flags, reg, TMP_REG3, argw - compiler->cache_argw);
960 
961 	if (compiler->cache_arg == SLJIT_MEM && (argw - compiler->cache_argw <= SIMM_MAX) && (argw - compiler->cache_argw >= SIMM_MIN)) {
962 		offset = argw - compiler->cache_argw;
963 	} else {
964 		compiler->cache_arg = SLJIT_MEM;
965 
966 		argw_hi = TO_ARGW_HI(argw);
967 
968 		if (next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN && argw_hi != TO_ARGW_HI(next_argw)) {
969 			FAIL_IF(load_immediate(compiler, TMP_REG3, argw, tmp_r));
970 			compiler->cache_argw = argw;
971 			offset = 0;
972 		} else {
973 			FAIL_IF(load_immediate(compiler, TMP_REG3, argw_hi, tmp_r));
974 			compiler->cache_argw = argw_hi;
975 			offset = argw & 0xfff;
976 			argw = argw_hi;
977 		}
978 	}
979 
980 	if (!base)
981 		return push_mem_inst(compiler, flags, reg, TMP_REG3, offset);
982 
983 	if (arg == next_arg && next_argw - argw <= SIMM_MAX && next_argw - argw >= SIMM_MIN) {
984 		compiler->cache_arg = arg;
985 		FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG3) | RS1(TMP_REG3) | RS2(base)));
986 		return push_mem_inst(compiler, flags, reg, TMP_REG3, offset);
987 	}
988 
989 	FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(TMP_REG3) | RS2(base)));
990 	return push_mem_inst(compiler, flags, reg, tmp_r, offset);
991 }
992 
993 static sljit_s32 emit_op_mem(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg, sljit_sw argw)
994 {
995 	sljit_s32 base = arg & REG_MASK;
996 	sljit_s32 tmp_r = TMP_REG1;
997 
998 	if (getput_arg_fast(compiler, flags, reg, arg, argw))
999 		return compiler->error;
1000 
1001 	if ((flags & MEM_MASK) <= GPR_REG && (flags & LOAD_DATA))
1002 		tmp_r = reg;
1003 
1004 	if (SLJIT_UNLIKELY(arg & OFFS_REG_MASK)) {
1005 		argw &= 0x3;
1006 
1007 		if (SLJIT_UNLIKELY(argw)) {
1008 			FAIL_IF(push_inst(compiler, SLLI | RD(tmp_r) | RS1(OFFS_REG(arg)) | IMM_I(argw)));
1009 			FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(tmp_r) | RS2(base)));
1010 		}
1011 		else
1012 			FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(base) | RS2(OFFS_REG(arg))));
1013 
1014 		argw = 0;
1015 	} else {
1016 		FAIL_IF(load_immediate(compiler, tmp_r, TO_ARGW_HI(argw), TMP_REG3));
1017 
1018 		if (base != 0)
1019 			FAIL_IF(push_inst(compiler, ADD | RD(tmp_r) | RS1(tmp_r) | RS2(base)));
1020 	}
1021 
1022 	return push_mem_inst(compiler, flags, reg, tmp_r, argw & 0xfff);
1023 }
1024 
1025 static SLJIT_INLINE sljit_s32 emit_op_mem2(struct sljit_compiler *compiler, sljit_s32 flags, sljit_s32 reg, sljit_s32 arg1, sljit_sw arg1w, sljit_s32 arg2, sljit_sw arg2w)
1026 {
1027 	if (getput_arg_fast(compiler, flags, reg, arg1, arg1w))
1028 		return compiler->error;
1029 	return getput_arg(compiler, flags, reg, arg1, arg1w, arg2, arg2w);
1030 }
1031 
1032 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1033 #define WORD 0
1034 #define IMM_EXTEND(v) (IMM_I(v))
1035 #else /* !SLJIT_CONFIG_RISCV_32 */
1036 #define WORD word
1037 #define IMM_EXTEND(v) (IMM_I((op & SLJIT_32) ? (v) : (32 + (v))))
1038 #endif /* SLJIT_CONFIG_RISCV_32 */
1039 
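/* Software count-leading/trailing-zeros: without the B extension this is done
   with a short binary-search style shift loop. OTHER_FLAG accumulates the
   result, TMP_REG1 holds the current probe shift (halved each iteration), and
   the early branches skip the loop when the value is zero (result = width) or
   when the top/bottom bit is already set (result = 0). */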
1040 static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw src)
1041 {
1042 	sljit_s32 is_clz = (GET_OPCODE(op) == SLJIT_CLZ);
1043 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1044 	sljit_ins word = (op & SLJIT_32) >> 5;
1045 	sljit_ins max = (op & SLJIT_32) ? 32 : 64;
1046 #else /* !SLJIT_CONFIG_RISCV_64 */
1047 	sljit_ins max = 32;
1048 #endif /* SLJIT_CONFIG_RISCV_64 */
1049 
1050 	SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
1051 
1052 	/* The OTHER_FLAG is the counter. */
1053 	FAIL_IF(push_inst(compiler, ADDI | WORD | RD(OTHER_FLAG) | RS1(TMP_ZERO) | IMM_I(max)));
1054 
1055 	/* The TMP_REG2 is the next value. */
1056 	if (src != TMP_REG2)
1057 		FAIL_IF(push_inst(compiler, ADDI | WORD | RD(TMP_REG2) | RS1(src) | IMM_I(0)));
1058 
1059 	FAIL_IF(push_inst(compiler, BEQ | RS1(TMP_REG2) | RS2(TMP_ZERO) | ((sljit_ins)((is_clz ? 4 : 5) * SSIZE_OF(ins)) << 7) | ((sljit_ins)(8 * SSIZE_OF(ins)) << 20)));
1060 
1061 	FAIL_IF(push_inst(compiler, ADDI | WORD | RD(OTHER_FLAG) | RS1(TMP_ZERO) | IMM_I(0)));
1062 	if (!is_clz) {
1063 		FAIL_IF(push_inst(compiler, ANDI | RD(TMP_REG1) | RS1(TMP_REG2) | IMM_I(1)));
1064 		FAIL_IF(push_inst(compiler, BNE | RS1(TMP_REG1) | RS2(TMP_ZERO) | ((sljit_ins)(2 * SSIZE_OF(ins)) << 7) | ((sljit_ins)(8 * SSIZE_OF(ins)) << 20)));
1065 	} else
1066 		FAIL_IF(push_inst(compiler, BLT | RS1(TMP_REG2) | RS2(TMP_ZERO) | ((sljit_ins)(2 * SSIZE_OF(ins)) << 7) | ((sljit_ins)(8 * SSIZE_OF(ins)) << 20)));
1067 
1068 	/* The TMP_REG1 is the next shift. */
1069 	FAIL_IF(push_inst(compiler, ADDI | WORD | RD(TMP_REG1) | RS1(TMP_ZERO) | IMM_I(max)));
1070 
1071 	FAIL_IF(push_inst(compiler, ADDI | WORD | RD(EQUAL_FLAG) | RS1(TMP_REG2) | IMM_I(0)));
1072 	FAIL_IF(push_inst(compiler, SRLI | WORD | RD(TMP_REG1) | RS1(TMP_REG1) | IMM_I(1)));
1073 
1074 	FAIL_IF(push_inst(compiler, (is_clz ? SRL : SLL) | WORD | RD(TMP_REG2) | RS1(EQUAL_FLAG) | RS2(TMP_REG1)));
1075 	FAIL_IF(push_inst(compiler, BNE | RS1(TMP_REG2) | RS2(TMP_ZERO) | ((sljit_ins)0xfe000e80 - ((2 * SSIZE_OF(ins)) << 7))));
1076 	FAIL_IF(push_inst(compiler, ADDI | WORD | RD(TMP_REG2) | RS1(TMP_REG1) | IMM_I(-1)));
1077 	FAIL_IF(push_inst(compiler, (is_clz ? SRL : SLL) | WORD | RD(TMP_REG2) | RS1(EQUAL_FLAG) | RS2(TMP_REG2)));
1078 	FAIL_IF(push_inst(compiler, OR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(TMP_REG1)));
1079 	FAIL_IF(push_inst(compiler, BEQ | RS1(TMP_REG2) | RS2(TMP_ZERO) | ((sljit_ins)0xfe000e80 - ((5 * SSIZE_OF(ins)) << 7))));
1080 
1081 	return push_inst(compiler, ADDI | WORD | RD(dst) | RS1(OTHER_FLAG) | IMM_I(0));
1082 }
1083 
1084 #define EMIT_LOGICAL(op_imm, op_reg) \
1085 	if (flags & SRC2_IMM) { \
1086 		if (op & SLJIT_SET_Z) \
1087 			FAIL_IF(push_inst(compiler, op_imm | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(src2))); \
1088 		if (!(flags & UNUSED_DEST)) \
1089 			FAIL_IF(push_inst(compiler, op_imm | RD(dst) | RS1(src1) | IMM_I(src2))); \
1090 	} \
1091 	else { \
1092 		if (op & SLJIT_SET_Z) \
1093 			FAIL_IF(push_inst(compiler, op_reg | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2))); \
1094 		if (!(flags & UNUSED_DEST)) \
1095 			FAIL_IF(push_inst(compiler, op_reg | RD(dst) | RS1(src1) | RS2(src2))); \
1096 	}
1097 
1098 #define EMIT_SHIFT(imm, reg) \
1099 	op_imm = (imm); \
1100 	op_reg = (reg);
1101 
1102 static SLJIT_INLINE sljit_s32 emit_single_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
1103 	sljit_s32 dst, sljit_s32 src1, sljit_sw src2)
1104 {
1105 	sljit_s32 is_overflow, is_carry, carry_src_r, is_handled;
1106 	sljit_ins op_imm, op_reg;
1107 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1108 	sljit_ins word = (op & SLJIT_32) >> 5;
1109 #endif /* SLJIT_CONFIG_RISCV_64 */
1110 
1111 	SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
1112 
1113 	switch (GET_OPCODE(op)) {
1114 	case SLJIT_MOV:
1115 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1116 		if (dst != src2)
1117 			return push_inst(compiler, ADDI | RD(dst) | RS1(src2) | IMM_I(0));
1118 		return SLJIT_SUCCESS;
1119 
1120 	case SLJIT_MOV_U8:
1121 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1122 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
1123 			return push_inst(compiler, ANDI | RD(dst) | RS1(src2) | IMM_I(0xff));
1124 		SLJIT_ASSERT(dst == src2);
1125 		return SLJIT_SUCCESS;
1126 
1127 	case SLJIT_MOV_S8:
1128 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1129 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
1130 			FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(24)));
1131 			return push_inst(compiler, SRAI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(24));
1132 		}
1133 		SLJIT_ASSERT(dst == src2);
1134 		return SLJIT_SUCCESS;
1135 
1136 	case SLJIT_MOV_U16:
1137 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1138 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
1139 			FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(16)));
1140 			return push_inst(compiler, SRLI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(16));
1141 		}
1142 		SLJIT_ASSERT(dst == src2);
1143 		return SLJIT_SUCCESS;
1144 
1145 	case SLJIT_MOV_S16:
1146 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1147 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
1148 			FAIL_IF(push_inst(compiler, SLLI | WORD | RD(dst) | RS1(src2) | IMM_EXTEND(16)));
1149 			return push_inst(compiler, SRAI | WORD | RD(dst) | RS1(dst) | IMM_EXTEND(16));
1150 		}
1151 		SLJIT_ASSERT(dst == src2);
1152 		return SLJIT_SUCCESS;
1153 
1154 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1155 	case SLJIT_MOV_U32:
1156 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1157 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE)) {
1158 			FAIL_IF(push_inst(compiler, SLLI | RD(dst) | RS1(src2) | IMM_I(32)));
1159 			return push_inst(compiler, SRLI | RD(dst) | RS1(dst) | IMM_I(32));
1160 		}
1161 		SLJIT_ASSERT(dst == src2);
1162 		return SLJIT_SUCCESS;
1163 
1164 	case SLJIT_MOV_S32:
1165 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1166 		if ((flags & (REG_DEST | REG2_SOURCE)) == (REG_DEST | REG2_SOURCE))
1167 			return push_inst(compiler, ADDI | 0x8 | RD(dst) | RS1(src2) | IMM_I(0));
1168 		SLJIT_ASSERT(dst == src2);
1169 		return SLJIT_SUCCESS;
1170 #endif /* SLJIT_CONFIG_RISCV_64 */
1171 
1172 	case SLJIT_CLZ:
1173 	case SLJIT_CTZ:
1174 		SLJIT_ASSERT(src1 == TMP_REG1 && !(flags & SRC2_IMM));
1175 		return emit_clz_ctz(compiler, op, dst, src2);
1176 
1177 	case SLJIT_ADD:
1178 		/* Overflow computation (both add and sub): overflow = src1_sign ^ src2_sign ^ result_sign ^ carry_flag */
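		/* For example, on a 32 bit addition 0x7fffffff + 1 = 0x80000000: both source
		   signs are 0, the result sign is 1 and there is no carry out, so the XOR of
		   the four bits is 1 and overflow is reported. */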
1179 		is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
1180 		carry_src_r = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
1181 
1182 		if (flags & SRC2_IMM) {
1183 			if (is_overflow) {
1184 				if (src2 >= 0)
1185 					FAIL_IF(push_inst(compiler, ADDI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(0)));
1186 				else
1187 					FAIL_IF(push_inst(compiler, XORI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(-1)));
1188 			}
1189 			else if (op & SLJIT_SET_Z)
1190 				FAIL_IF(push_inst(compiler, ADDI | WORD | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(src2)));
1191 
1192 			/* Only the zero flag is needed. */
1193 			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
1194 				FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(src2)));
1195 		}
1196 		else {
1197 			if (is_overflow)
1198 				FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1199 			else if (op & SLJIT_SET_Z)
1200 				FAIL_IF(push_inst(compiler, ADD | WORD | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1201 
1202 			if (is_overflow || carry_src_r != 0) {
1203 				if (src1 != dst)
1204 					carry_src_r = (sljit_s32)src1;
1205 				else if (src2 != dst)
1206 					carry_src_r = (sljit_s32)src2;
1207 				else {
1208 					FAIL_IF(push_inst(compiler, ADDI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(0)));
1209 					carry_src_r = OTHER_FLAG;
1210 				}
1211 			}
1212 
1213 			/* Only the zero flag is needed. */
1214 			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
1215 				FAIL_IF(push_inst(compiler, ADD | WORD | RD(dst) | RS1(src1) | RS2(src2)));
1216 		}
1217 
1218 		/* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
1219 		if (is_overflow || carry_src_r != 0) {
1220 			if (flags & SRC2_IMM)
1221 				FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RS1(dst) | IMM_I(src2)));
1222 			else
1223 				FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(dst) | RS2(carry_src_r)));
1224 		}
1225 
1226 		if (!is_overflow)
1227 			return SLJIT_SUCCESS;
1228 
1229 		FAIL_IF(push_inst(compiler, XOR | RD(TMP_REG1) | RS1(dst) | RS2(EQUAL_FLAG)));
1230 		if (op & SLJIT_SET_Z)
1231 			FAIL_IF(push_inst(compiler, ADDI | RD(EQUAL_FLAG) | RS1(dst) | IMM_I(0)));
1232 		FAIL_IF(push_inst(compiler, SRLI | WORD | RD(TMP_REG1) | RS1(TMP_REG1) | IMM_EXTEND(31)));
1233 		return push_inst(compiler, XOR | RD(OTHER_FLAG) | RS1(TMP_REG1) | RS2(OTHER_FLAG));
1234 
1235 	case SLJIT_ADDC:
1236 		carry_src_r = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
1237 
1238 		if (flags & SRC2_IMM) {
1239 			FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(src2)));
1240 		} else {
1241 			if (carry_src_r != 0) {
1242 				if (src1 != dst)
1243 					carry_src_r = (sljit_s32)src1;
1244 				else if (src2 != dst)
1245 					carry_src_r = (sljit_s32)src2;
1246 				else {
1247 					FAIL_IF(push_inst(compiler, ADDI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(0)));
1248 					carry_src_r = EQUAL_FLAG;
1249 				}
1250 			}
1251 
1252 			FAIL_IF(push_inst(compiler, ADD | WORD | RD(dst) | RS1(src1) | RS2(src2)));
1253 		}
1254 
1255 		/* Carry is zero if a + b >= a or a + b >= b, otherwise it is 1. */
1256 		if (carry_src_r != 0) {
1257 			if (flags & SRC2_IMM)
1258 				FAIL_IF(push_inst(compiler, SLTUI | RD(EQUAL_FLAG) | RS1(dst) | IMM_I(src2)));
1259 			else
1260 				FAIL_IF(push_inst(compiler, SLTU | RD(EQUAL_FLAG) | RS1(dst) | RS2(carry_src_r)));
1261 		}
1262 
1263 		FAIL_IF(push_inst(compiler, ADD | WORD | RD(dst) | RS1(dst) | RS2(OTHER_FLAG)));
1264 
1265 		if (carry_src_r == 0)
1266 			return SLJIT_SUCCESS;
1267 
1268 		/* OTHER_FLAG becomes 1 only if (dst == 0) && (OTHER_FLAG == 1). */
1269 		FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(dst) | RS2(OTHER_FLAG)));
1270 		/* Set carry flag. */
1271 		return push_inst(compiler, OR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(EQUAL_FLAG));
1272 
1273 	case SLJIT_SUB:
1274 		if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
1275 			FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG2) | RS1(TMP_ZERO) | IMM_I(src2)));
1276 			src2 = TMP_REG2;
1277 			flags &= ~SRC2_IMM;
1278 		}
1279 
1280 		is_handled = 0;
1281 
1282 		if (flags & SRC2_IMM) {
1283 			if (GET_FLAG_TYPE(op) == SLJIT_LESS || GET_FLAG_TYPE(op) == SLJIT_GREATER_EQUAL) {
1284 				FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
1285 				is_handled = 1;
1286 			}
1287 			else if (GET_FLAG_TYPE(op) == SLJIT_SIG_LESS || GET_FLAG_TYPE(op) == SLJIT_SIG_GREATER_EQUAL) {
1288 				FAIL_IF(push_inst(compiler, SLTI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
1289 				is_handled = 1;
1290 			}
1291 		}
1292 
1293 		if (!is_handled && GET_FLAG_TYPE(op) >= SLJIT_LESS && GET_FLAG_TYPE(op) <= SLJIT_SIG_LESS_EQUAL) {
1294 			is_handled = 1;
1295 
1296 			if (flags & SRC2_IMM) {
1297 				FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG2) | RS1(TMP_ZERO) | IMM_I(src2)));
1298 				src2 = TMP_REG2;
1299 				flags &= ~SRC2_IMM;
1300 			}
1301 
1302 			switch (GET_FLAG_TYPE(op)) {
1303 			case SLJIT_LESS:
1304 			case SLJIT_GREATER_EQUAL:
1305 				FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
1306 				break;
1307 			case SLJIT_GREATER:
1308 			case SLJIT_LESS_EQUAL:
1309 				FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(src2) | RS2(src1)));
1310 				break;
1311 			case SLJIT_SIG_LESS:
1312 			case SLJIT_SIG_GREATER_EQUAL:
1313 				FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
1314 				break;
1315 			case SLJIT_SIG_GREATER:
1316 			case SLJIT_SIG_LESS_EQUAL:
1317 				FAIL_IF(push_inst(compiler, SLT | RD(OTHER_FLAG) | RS1(src2) | RS2(src1)));
1318 				break;
1319 			}
1320 		}
1321 
1322 		if (is_handled) {
1323 			if (flags & SRC2_IMM) {
1324 				if (op & SLJIT_SET_Z)
1325 					FAIL_IF(push_inst(compiler, ADDI | WORD | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(-src2)));
1326 				if (!(flags & UNUSED_DEST))
1327 					return push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(-src2));
1328 			}
1329 			else {
1330 				if (op & SLJIT_SET_Z)
1331 					FAIL_IF(push_inst(compiler, SUB | WORD | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1332 				if (!(flags & UNUSED_DEST))
1333 					return push_inst(compiler, SUB | WORD | RD(dst) | RS1(src1) | RS2(src2));
1334 			}
1335 			return SLJIT_SUCCESS;
1336 		}
1337 
1338 		is_overflow = GET_FLAG_TYPE(op) == SLJIT_OVERFLOW;
1339 		is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
1340 
1341 		if (flags & SRC2_IMM) {
1342 			if (is_overflow) {
1343 				if (src2 >= 0)
1344 					FAIL_IF(push_inst(compiler, ADDI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(0)));
1345 				else
1346 					FAIL_IF(push_inst(compiler, XORI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(-1)));
1347 			}
1348 			else if (op & SLJIT_SET_Z)
1349 				FAIL_IF(push_inst(compiler, ADDI | WORD | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(-src2)));
1350 
1351 			if (is_overflow || is_carry)
1352 				FAIL_IF(push_inst(compiler, SLTUI | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
1353 
1354 			/* Only the zero flag is needed. */
1355 			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
1356 				FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(-src2)));
1357 		}
1358 		else {
1359 			if (is_overflow)
1360 				FAIL_IF(push_inst(compiler, XOR | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1361 			else if (op & SLJIT_SET_Z)
1362 				FAIL_IF(push_inst(compiler, SUB | WORD | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1363 
1364 			if (is_overflow || is_carry)
1365 				FAIL_IF(push_inst(compiler, SLTU | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
1366 
1367 			/* Only the zero flag is needed. */
1368 			if (!(flags & UNUSED_DEST) || (op & VARIABLE_FLAG_MASK))
1369 				FAIL_IF(push_inst(compiler, SUB | WORD | RD(dst) | RS1(src1) | RS2(src2)));
1370 		}
1371 
1372 		if (!is_overflow)
1373 			return SLJIT_SUCCESS;
1374 
1375 		FAIL_IF(push_inst(compiler, XOR | RD(TMP_REG1) | RS1(dst) | RS2(EQUAL_FLAG)));
1376 		if (op & SLJIT_SET_Z)
1377 			FAIL_IF(push_inst(compiler, ADDI | RD(EQUAL_FLAG) | RS1(dst) | IMM_I(0)));
1378 		FAIL_IF(push_inst(compiler, SRLI | WORD | RD(TMP_REG1) | RS1(TMP_REG1) | IMM_EXTEND(31)));
1379 		return push_inst(compiler, XOR | RD(OTHER_FLAG) | RS1(TMP_REG1) | RS2(OTHER_FLAG));
1380 
1381 	case SLJIT_SUBC:
1382 		if ((flags & SRC2_IMM) && src2 == SIMM_MIN) {
1383 			FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG2) | RS1(TMP_ZERO) | IMM_I(src2)));
1384 			src2 = TMP_REG2;
1385 			flags &= ~SRC2_IMM;
1386 		}
1387 
1388 		is_carry = GET_FLAG_TYPE(op) == GET_FLAG_TYPE(SLJIT_SET_CARRY);
1389 
1390 		if (flags & SRC2_IMM) {
1391 			if (is_carry)
1392 				FAIL_IF(push_inst(compiler, SLTUI | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(src2)));
1393 
1394 			FAIL_IF(push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(-src2)));
1395 		}
1396 		else {
1397 			if (is_carry)
1398 				FAIL_IF(push_inst(compiler, SLTU | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1399 
1400 			FAIL_IF(push_inst(compiler, SUB | WORD | RD(dst) | RS1(src1) | RS2(src2)));
1401 		}
1402 
1403 		if (is_carry)
1404 			FAIL_IF(push_inst(compiler, SLTU | RD(TMP_REG1) | RS1(dst) | RS2(OTHER_FLAG)));
1405 
1406 		FAIL_IF(push_inst(compiler, SUB | WORD | RD(dst) | RS1(dst) | RS2(OTHER_FLAG)));
1407 
1408 		if (!is_carry)
1409 			return SLJIT_SUCCESS;
1410 
1411 		return push_inst(compiler, OR | RD(OTHER_FLAG) | RS1(EQUAL_FLAG) | RS2(TMP_REG1));
1412 
1413 	case SLJIT_MUL:
1414 		SLJIT_ASSERT(!(flags & SRC2_IMM));
1415 
1416 		if (GET_FLAG_TYPE(op) != SLJIT_OVERFLOW)
1417 			return push_inst(compiler, MUL | WORD | RD(dst) | RS1(src1) | RS2(src2));
1418 
1419 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1420 		if (word) {
1421 			FAIL_IF(push_inst(compiler, MUL | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
1422 			FAIL_IF(push_inst(compiler, MUL | 0x8 | RD(dst) | RS1(src1) | RS2(src2)));
1423 			return push_inst(compiler, SUB | RD(OTHER_FLAG) | RS1(dst) | RS2(OTHER_FLAG));
1424 		}
1425 #endif /* SLJIT_CONFIG_RISCV_64 */
1426 
1427 		FAIL_IF(push_inst(compiler, MULH | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1428 		FAIL_IF(push_inst(compiler, MUL | RD(dst) | RS1(src1) | RS2(src2)));
1429 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1430 		FAIL_IF(push_inst(compiler, SRAI | RD(OTHER_FLAG) | RS1(dst) | IMM_I(31)));
1431 #else /* !SLJIT_CONFIG_RISCV_32 */
1432 		FAIL_IF(push_inst(compiler, SRAI | RD(OTHER_FLAG) | RS1(dst) | IMM_I(63)));
1433 #endif /* SLJIT_CONFIG_RISCV_32 */
1434 		return push_inst(compiler, SUB | RD(OTHER_FLAG) | RS1(EQUAL_FLAG) | RS2(OTHER_FLAG));
1435 
1436 	case SLJIT_AND:
1437 		EMIT_LOGICAL(ANDI, AND);
1438 		return SLJIT_SUCCESS;
1439 
1440 	case SLJIT_OR:
1441 		EMIT_LOGICAL(ORI, OR);
1442 		return SLJIT_SUCCESS;
1443 
1444 	case SLJIT_XOR:
1445 		EMIT_LOGICAL(XORI, XOR);
1446 		return SLJIT_SUCCESS;
1447 
1448 	case SLJIT_SHL:
1449 	case SLJIT_MSHL:
1450 		EMIT_SHIFT(SLLI, SLL);
1451 		break;
1452 
1453 	case SLJIT_LSHR:
1454 	case SLJIT_MLSHR:
1455 		EMIT_SHIFT(SRLI, SRL);
1456 		break;
1457 
1458 	case SLJIT_ASHR:
1459 	case SLJIT_MASHR:
1460 		EMIT_SHIFT(SRAI, SRA);
1461 		break;
1462 
1463 	case SLJIT_ROTL:
1464 	case SLJIT_ROTR:
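		/* The base integer ISA has no rotate instructions, so rotates are
		   synthesized from two opposite shifts and an OR. For a register count the
		   opposite shift amount is obtained by negation, since SLL/SRL only use the
		   low bits of the count. A sketch of the ROTR expansion, with illustrative
		   register names:
		       sub  neg, zero, cnt
		       srl  lo, src, cnt
		       sll  hi, src, neg
		       or   dst, hi, lo */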
1465 		if (flags & SRC2_IMM) {
1466 			SLJIT_ASSERT(src2 != 0);
1467 
1468 			op_imm = (GET_OPCODE(op) == SLJIT_ROTL) ? SLLI : SRLI;
1469 			FAIL_IF(push_inst(compiler, op_imm | WORD | RD(OTHER_FLAG) | RS1(src1) | IMM_I(src2)));
1470 
1471 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1472 			src2 = ((op & SLJIT_32) ? 32 : 64) - src2;
1473 #else /* !SLJIT_CONFIG_RISCV_64 */
1474 			src2 = 32 - src2;
1475 #endif /* SLJIT_CONFIG_RISCV_64 */
1476 			op_imm = (GET_OPCODE(op) == SLJIT_ROTL) ? SRLI : SLLI;
1477 			FAIL_IF(push_inst(compiler, op_imm | WORD | RD(dst) | RS1(src1) | IMM_I(src2)));
1478 			return push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(OTHER_FLAG));
1479 		}
1480 
1481 		if (src2 == TMP_ZERO) {
1482 			if (dst != src1)
1483 				return push_inst(compiler, ADDI | WORD | RD(dst) | RS1(src1) | IMM_I(0));
1484 			return SLJIT_SUCCESS;
1485 		}
1486 
1487 		FAIL_IF(push_inst(compiler, SUB | WORD | RD(EQUAL_FLAG) | RS1(TMP_ZERO) | RS2(src2)));
1488 		op_reg = (GET_OPCODE(op) == SLJIT_ROTL) ? SLL : SRL;
1489 		FAIL_IF(push_inst(compiler, op_reg | WORD | RD(OTHER_FLAG) | RS1(src1) | RS2(src2)));
1490 		op_reg = (GET_OPCODE(op) == SLJIT_ROTL) ? SRL : SLL;
1491 		FAIL_IF(push_inst(compiler, op_reg | WORD | RD(dst) | RS1(src1) | RS2(EQUAL_FLAG)));
1492 		return push_inst(compiler, OR | RD(dst) | RS1(dst) | RS2(OTHER_FLAG));
1493 
1494 	default:
1495 		SLJIT_UNREACHABLE();
1496 		return SLJIT_SUCCESS;
1497 	}
1498 
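	/* The shift cases above only select the immediate (op_imm) and register
	   (op_reg) encodings; the instruction itself is emitted below, with the
	   operation repeated into EQUAL_FLAG when the zero flag is requested. */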
1499 	if (flags & SRC2_IMM) {
1500 		if (op & SLJIT_SET_Z)
1501 			FAIL_IF(push_inst(compiler, op_imm | WORD | RD(EQUAL_FLAG) | RS1(src1) | IMM_I(src2)));
1502 
1503 		if (flags & UNUSED_DEST)
1504 			return SLJIT_SUCCESS;
1505 		return push_inst(compiler, op_imm | WORD | RD(dst) | RS1(src1) | IMM_I(src2));
1506 	}
1507 
1508 	if (op & SLJIT_SET_Z)
1509 		FAIL_IF(push_inst(compiler, op_reg | WORD | RD(EQUAL_FLAG) | RS1(src1) | RS2(src2)));
1510 
1511 	if (flags & UNUSED_DEST)
1512 		return SLJIT_SUCCESS;
1513 	return push_inst(compiler, op_reg | WORD | RD(dst) | RS1(src1) | RS2(src2));
1514 }
1515 
1516 #undef IMM_EXTEND
1517 
1518 static sljit_s32 emit_op(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 flags,
1519 	sljit_s32 dst, sljit_sw dstw,
1520 	sljit_s32 src1, sljit_sw src1w,
1521 	sljit_s32 src2, sljit_sw src2w)
1522 {
1523 	/* arg1 goes to TMP_REG1 or src reg
1524 	   arg2 goes to TMP_REG2, imm or src reg
1525 	   TMP_REG3 can be used for caching
1526 	   result goes to TMP_REG2, so storing the result to memory can still use TMP_REG1 and TMP_REG3. */
1527 	sljit_s32 dst_r = TMP_REG2;
1528 	sljit_s32 src1_r;
1529 	sljit_sw src2_r = 0;
1530 	sljit_s32 sugg_src2_r = TMP_REG2;
1531 
1532 	if (!(flags & ALT_KEEP_CACHE)) {
1533 		compiler->cache_arg = 0;
1534 		compiler->cache_argw = 0;
1535 	}
1536 
1537 	if (dst == TMP_REG2) {
1538 		SLJIT_ASSERT(HAS_FLAGS(op));
1539 		flags |= UNUSED_DEST;
1540 	}
1541 	else if (FAST_IS_REG(dst)) {
1542 		dst_r = dst;
1543 		flags |= REG_DEST;
1544 		if (flags & MOVE_OP)
1545 			sugg_src2_r = dst_r;
1546 	}
1547 	else if ((dst & SLJIT_MEM) && !getput_arg_fast(compiler, flags | ARG_TEST, TMP_REG1, dst, dstw))
1548 		flags |= SLOW_DEST;
1549 
1550 	if (flags & IMM_OP) {
1551 		if ((src2 & SLJIT_IMM) && src2w != 0 && src2w <= SIMM_MAX && src2w >= SIMM_MIN) {
1552 			flags |= SRC2_IMM;
1553 			src2_r = src2w;
1554 		}
1555 		else if ((flags & CUMULATIVE_OP) && (src1 & SLJIT_IMM) && src1w != 0 && src1w <= SIMM_MAX && src1w >= SIMM_MIN) {
1556 			flags |= SRC2_IMM;
1557 			src2_r = src1w;
1558 
1559 			/* And swap arguments. */
1560 			src1 = src2;
1561 			src1w = src2w;
1562 			src2 = SLJIT_IMM;
1563 			/* src2w = src2_r is not needed. */
1564 		}
1565 	}
1566 
1567 	/* Source 1. */
1568 	if (FAST_IS_REG(src1)) {
1569 		src1_r = src1;
1570 		flags |= REG1_SOURCE;
1571 	}
1572 	else if (src1 & SLJIT_IMM) {
1573 		if (src1w) {
1574 			FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
1575 			src1_r = TMP_REG1;
1576 		}
1577 		else
1578 			src1_r = TMP_ZERO;
1579 	}
1580 	else {
1581 		if (getput_arg_fast(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w))
1582 			FAIL_IF(compiler->error);
1583 		else
1584 			flags |= SLOW_SRC1;
1585 		src1_r = TMP_REG1;
1586 	}
1587 
1588 	/* Source 2. */
1589 	if (FAST_IS_REG(src2)) {
1590 		src2_r = src2;
1591 		flags |= REG2_SOURCE;
1592 		if ((flags & (REG_DEST | MOVE_OP)) == MOVE_OP)
1593 			dst_r = (sljit_s32)src2_r;
1594 	}
1595 	else if (src2 & SLJIT_IMM) {
1596 		if (!(flags & SRC2_IMM)) {
1597 			if (src2w) {
1598 				FAIL_IF(load_immediate(compiler, sugg_src2_r, src2w, TMP_REG3));
1599 				src2_r = sugg_src2_r;
1600 			}
1601 			else {
1602 				src2_r = TMP_ZERO;
1603 				if (flags & MOVE_OP) {
1604 					if (dst & SLJIT_MEM)
1605 						dst_r = 0;
1606 					else
1607 						op = SLJIT_MOV;
1608 				}
1609 			}
1610 		}
1611 	}
1612 	else {
1613 		if (getput_arg_fast(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w))
1614 			FAIL_IF(compiler->error);
1615 		else
1616 			flags |= SLOW_SRC2;
1617 		src2_r = sugg_src2_r;
1618 	}
1619 
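	/* When both sources need the slow (multi-instruction) address form, the loads
	   are ordered so that the address cached by the first getput_arg call can, if
	   possible, be reused by the following access (see can_cache). */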
1620 	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
1621 		SLJIT_ASSERT(src2_r == TMP_REG2);
1622 		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
1623 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, src1, src1w));
1624 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
1625 		}
1626 		else {
1627 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, src2, src2w));
1628 			FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG2, src2, src2w, dst, dstw));
1629 		}
1630 	}
1631 	else if (flags & SLOW_SRC1)
1632 		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, TMP_REG1, src1, src1w, dst, dstw));
1633 	else if (flags & SLOW_SRC2)
1634 		FAIL_IF(getput_arg(compiler, flags | LOAD_DATA, sugg_src2_r, src2, src2w, dst, dstw));
1635 
1636 	FAIL_IF(emit_single_op(compiler, op, flags, dst_r, src1_r, src2_r));
1637 
1638 	if (dst & SLJIT_MEM) {
1639 		if (!(flags & SLOW_DEST)) {
1640 			getput_arg_fast(compiler, flags, dst_r, dst, dstw);
1641 			return compiler->error;
1642 		}
1643 		return getput_arg(compiler, flags, dst_r, dst, dstw, 0, 0);
1644 	}
1645 
1646 	return SLJIT_SUCCESS;
1647 }
1648 
1649 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)
1650 {
1651 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1652 	sljit_ins word = (op & SLJIT_32) >> 5;
1653 
1654 	SLJIT_ASSERT(word == 0 || word == 0x8);
1655 #endif /* SLJIT_CONFIG_RISCV_64 */
1656 
1657 	CHECK_ERROR();
1658 	CHECK(check_sljit_emit_op0(compiler, op));
1659 
1660 	switch (GET_OPCODE(op)) {
1661 	case SLJIT_BREAKPOINT:
1662 		return push_inst(compiler, EBREAK);
1663 	case SLJIT_NOP:
1664 		return push_inst(compiler, ADDI | RD(TMP_ZERO) | RS1(TMP_ZERO) | IMM_I(0));
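	/* LMUL_* leaves the double-width product in R1:R0 and DIVMOD_* leaves the
	   quotient in R0 and the remainder in R1; one operand is copied to TMP_REG1
	   first because the first emitted instruction overwrites it. */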
1665 	case SLJIT_LMUL_UW:
1666 		FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(SLJIT_R1) | IMM_I(0)));
1667 		FAIL_IF(push_inst(compiler, MULHU | RD(SLJIT_R1) | RS1(SLJIT_R0) | RS2(SLJIT_R1)));
1668 		return push_inst(compiler, MUL | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(TMP_REG1));
1669 	case SLJIT_LMUL_SW:
1670 		FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(SLJIT_R1) | IMM_I(0)));
1671 		FAIL_IF(push_inst(compiler, MULH | RD(SLJIT_R1) | RS1(SLJIT_R0) | RS2(SLJIT_R1)));
1672 		return push_inst(compiler, MUL | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(TMP_REG1));
1673 	case SLJIT_DIVMOD_UW:
1674 		FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(SLJIT_R0) | IMM_I(0)));
1675 		FAIL_IF(push_inst(compiler, DIVU | WORD | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(SLJIT_R1)));
1676 		return push_inst(compiler, REMU | WORD | RD(SLJIT_R1) | RS1(TMP_REG1) | RS2(SLJIT_R1));
1677 	case SLJIT_DIVMOD_SW:
1678 		FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(SLJIT_R0) | IMM_I(0)));
1679 		FAIL_IF(push_inst(compiler, DIV | WORD | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(SLJIT_R1)));
1680 		return push_inst(compiler, REM | WORD | RD(SLJIT_R1) | RS1(TMP_REG1) | RS2(SLJIT_R1));
1681 	case SLJIT_DIV_UW:
1682 		return push_inst(compiler, DIVU | WORD | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(SLJIT_R1));
1683 	case SLJIT_DIV_SW:
1684 		return push_inst(compiler, DIV | WORD | RD(SLJIT_R0) | RS1(SLJIT_R0) | RS2(SLJIT_R1));
1685 	case SLJIT_ENDBR:
1686 	case SLJIT_SKIP_FRAMES_BEFORE_RETURN:
1687 		return SLJIT_SUCCESS;
1688 	}
1689 
1690 	return SLJIT_SUCCESS;
1691 }
1692 
1693 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op,
1694 	sljit_s32 dst, sljit_sw dstw,
1695 	sljit_s32 src, sljit_sw srcw)
1696 {
1697 	sljit_s32 flags = 0;
1698 
1699 	CHECK_ERROR();
1700 	CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
1701 	ADJUST_LOCAL_OFFSET(dst, dstw);
1702 	ADJUST_LOCAL_OFFSET(src, srcw);
1703 
1704 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1705 	if (op & SLJIT_32)
1706 		flags = INT_DATA | SIGNED_DATA;
1707 #endif
1708 
1709 	switch (GET_OPCODE(op)) {
1710 	case SLJIT_MOV:
1711 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1712 	case SLJIT_MOV_U32:
1713 	case SLJIT_MOV_S32:
1714 	case SLJIT_MOV32:
1715 #endif
1716 	case SLJIT_MOV_P:
1717 		return emit_op(compiler, SLJIT_MOV, WORD_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, srcw);
1718 
1719 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1720 	case SLJIT_MOV_U32:
1721 		return emit_op(compiler, SLJIT_MOV_U32, INT_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u32)srcw : srcw);
1722 
1723 	case SLJIT_MOV_S32:
1724 	/* Logical operators have no W variant, so sign-extended input is necessary for them. */
1725 	case SLJIT_MOV32:
1726 		return emit_op(compiler, SLJIT_MOV_S32, INT_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s32)srcw : srcw);
1727 #endif
1728 
1729 	case SLJIT_MOV_U8:
1730 		return emit_op(compiler, op, BYTE_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u8)srcw : srcw);
1731 
1732 	case SLJIT_MOV_S8:
1733 		return emit_op(compiler, op, BYTE_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s8)srcw : srcw);
1734 
1735 	case SLJIT_MOV_U16:
1736 		return emit_op(compiler, op, HALF_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_u16)srcw : srcw);
1737 
1738 	case SLJIT_MOV_S16:
1739 		return emit_op(compiler, op, HALF_DATA | SIGNED_DATA | MOVE_OP, dst, dstw, TMP_REG1, 0, src, (src & SLJIT_IMM) ? (sljit_s16)srcw : srcw);
1740 
1741 	case SLJIT_NOT:
1742 		return emit_op(compiler, SLJIT_XOR | (op & (SLJIT_32 | SLJIT_SET_Z)), flags, dst, dstw, src, srcw, SLJIT_IMM, -1);
1743 
1744 	case SLJIT_CLZ:
1745 	case SLJIT_CTZ:
1746 		return emit_op(compiler, op, flags, dst, dstw, TMP_REG1, 0, src, srcw);
1747 	}
1748 
1749 	SLJIT_UNREACHABLE();
1750 	return SLJIT_SUCCESS;
1751 }
1752 
1753 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op,
1754 	sljit_s32 dst, sljit_sw dstw,
1755 	sljit_s32 src1, sljit_sw src1w,
1756 	sljit_s32 src2, sljit_sw src2w)
1757 {
1758 	sljit_s32 flags = 0;
1759 
1760 	CHECK_ERROR();
1761 	CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
1762 	ADJUST_LOCAL_OFFSET(dst, dstw);
1763 	ADJUST_LOCAL_OFFSET(src1, src1w);
1764 	ADJUST_LOCAL_OFFSET(src2, src2w);
1765 
1766 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1767 	if (op & SLJIT_32) {
1768 		flags |= INT_DATA | SIGNED_DATA;
1769 		if (src1 & SLJIT_IMM)
1770 			src1w = (sljit_s32)src1w;
1771 		if (src2 & SLJIT_IMM)
1772 			src2w = (sljit_s32)src2w;
1773 	}
1774 #endif
1775 
1776 	switch (GET_OPCODE(op)) {
1777 	case SLJIT_ADD:
1778 	case SLJIT_ADDC:
1779 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_ADD;
1780 		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1781 
1782 	case SLJIT_SUB:
1783 	case SLJIT_SUBC:
1784 		compiler->status_flags_state = SLJIT_CURRENT_FLAGS_SUB;
1785 		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1786 
1787 	case SLJIT_MUL:
1788 		compiler->status_flags_state = 0;
1789 		return emit_op(compiler, op, flags | CUMULATIVE_OP, dst, dstw, src1, src1w, src2, src2w);
1790 
1791 	case SLJIT_AND:
1792 	case SLJIT_OR:
1793 	case SLJIT_XOR:
1794 		return emit_op(compiler, op, flags | CUMULATIVE_OP | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1795 
1796 	case SLJIT_SHL:
1797 	case SLJIT_MSHL:
1798 	case SLJIT_LSHR:
1799 	case SLJIT_MLSHR:
1800 	case SLJIT_ASHR:
1801 	case SLJIT_MASHR:
1802 	case SLJIT_ROTL:
1803 	case SLJIT_ROTR:
1804 		if (src2 & SLJIT_IMM) {
1805 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1806 			src2w &= 0x1f;
1807 #else /* !SLJIT_CONFIG_RISCV_32 */
1808 			if (op & SLJIT_32)
1809 				src2w &= 0x1f;
1810 			else
1811 				src2w &= 0x3f;
1812 #endif /* SLJIT_CONFIG_RISCV_32 */
1813 		}
1814 
1815 		return emit_op(compiler, op, flags | IMM_OP, dst, dstw, src1, src1w, src2, src2w);
1816 	}
1817 
1818 	SLJIT_UNREACHABLE();
1819 	return SLJIT_SUCCESS;
1820 }
1821 
1822 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op,
1823 	sljit_s32 src1, sljit_sw src1w,
1824 	sljit_s32 src2, sljit_sw src2w)
1825 {
1826 	CHECK_ERROR();
1827 	CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
1828 
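	/* Passing TMP_REG2 as the destination marks the result as unused in emit_op,
	   so only the status flags are computed. */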
1829 	SLJIT_SKIP_CHECKS(compiler);
1830 	return sljit_emit_op2(compiler, op, TMP_REG2, 0, src1, src1w, src2, src2w);
1831 }
1832 
1833 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op,
1834 	sljit_s32 src_dst,
1835 	sljit_s32 src1, sljit_sw src1w,
1836 	sljit_s32 src2, sljit_sw src2w)
1837 {
1838 	sljit_s32 is_left;
1839 	sljit_ins ins1, ins2, ins3;
1840 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
1841 	sljit_ins word = (op & SLJIT_32) >> 5;
1842 	sljit_s32 inp_flags = ((op & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
1843 	sljit_sw bit_length = (op & SLJIT_32) ? 32 : 64;
1844 #else /* !SLJIT_CONFIG_RISCV_64 */
1845 	sljit_s32 inp_flags = WORD_DATA | LOAD_DATA;
1846 	sljit_sw bit_length = 32;
1847 #endif /* SLJIT_CONFIG_RISCV_64 */
1848 
1849 	SLJIT_ASSERT(WORD == 0 || WORD == 0x8);
1850 
1851 	CHECK_ERROR();
1852 	CHECK(check_sljit_emit_shift_into(compiler, op, src_dst, src1, src1w, src2, src2w));
1853 
1854 	is_left = (GET_OPCODE(op) == SLJIT_SHL || GET_OPCODE(op) == SLJIT_MSHL);
1855 
1856 	if (src_dst == src1) {
1857 		SLJIT_SKIP_CHECKS(compiler);
1858 		return sljit_emit_op2(compiler, (is_left ? SLJIT_ROTL : SLJIT_ROTR) | (op & SLJIT_32), src_dst, 0, src_dst, 0, src2, src2w);
1859 	}
1860 
1861 	ADJUST_LOCAL_OFFSET(src1, src1w);
1862 	ADJUST_LOCAL_OFFSET(src2, src2w);
1863 
1864 	if (src2 & SLJIT_IMM) {
1865 		src2w &= bit_length - 1;
1866 
1867 		if (src2w == 0)
1868 			return SLJIT_SUCCESS;
1869 	} else if (src2 & SLJIT_MEM) {
1870 		FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG2, src2, src2w));
1871 		src2 = TMP_REG2;
1872 	}
1873 
1874 	if (src1 & SLJIT_MEM) {
1875 		FAIL_IF(emit_op_mem(compiler, inp_flags, TMP_REG1, src1, src1w));
1876 		src1 = TMP_REG1;
1877 	} else if (src1 & SLJIT_IMM) {
1878 		FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
1879 		src1 = TMP_REG1;
1880 	}
1881 
1882 	if (src2 & SLJIT_IMM) {
1883 		if (is_left) {
1884 			ins1 = SLLI | WORD | IMM_I(src2w);
1885 			src2w = bit_length - src2w;
1886 			ins2 = SRLI | WORD | IMM_I(src2w);
1887 		} else {
1888 			ins1 = SRLI | WORD | IMM_I(src2w);
1889 			src2w = bit_length - src2w;
1890 			ins2 = SLLI | WORD | IMM_I(src2w);
1891 		}
1892 
1893 		FAIL_IF(push_inst(compiler, ins1 | RD(src_dst) | RS1(src_dst)));
1894 		FAIL_IF(push_inst(compiler, ins2 | RD(TMP_REG1) | RS1(src1)));
1895 		return push_inst(compiler, OR | RD(src_dst) | RS1(src_dst) | RS2(TMP_REG1));
1896 	}
1897 
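	/* Register shift amount: the result is (src_dst << n) | (src1 >> (bit_length - n))
	   for the left variant, and the mirror image for the right one. When n may be
	   zero, src1 is pre-shifted by one and the remaining amount is (bit_length - 1) ^ n,
	   which avoids an out-of-range shift by bit_length; otherwise the negated amount
	   is used, as SLL/SRL only take the count modulo the word size. */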
1898 	if (is_left) {
1899 		ins1 = SLL;
1900 		ins2 = SRLI;
1901 		ins3 = SRL;
1902 	} else {
1903 		ins1 = SRL;
1904 		ins2 = SLLI;
1905 		ins3 = SLL;
1906 	}
1907 
1908 	FAIL_IF(push_inst(compiler, ins1 | WORD | RD(src_dst) | RS1(src_dst) | RS2(src2)));
1909 
1910 	if (!(op & SLJIT_SHIFT_INTO_NON_ZERO)) {
1911 		FAIL_IF(push_inst(compiler, ins2 | WORD | RD(TMP_REG1) | RS1(src1) | IMM_I(1)));
1912 		FAIL_IF(push_inst(compiler, XORI | RD(TMP_REG2) | RS1(src2) | IMM_I((sljit_ins)bit_length - 1)));
1913 		src1 = TMP_REG1;
1914 	} else
1915 		FAIL_IF(push_inst(compiler, SUB | WORD | RD(TMP_REG2) | RS1(TMP_ZERO) | RS2(src2)));
1916 
1917 	FAIL_IF(push_inst(compiler, ins3 | WORD | RD(TMP_REG1) | RS1(src1) | RS2(TMP_REG2)));
1918 	return push_inst(compiler, OR | RD(src_dst) | RS1(src_dst) | RS2(TMP_REG1));
1919 }
1920 
1921 #undef WORD
1922 
1923 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op,
1924 	sljit_s32 src, sljit_sw srcw)
1925 {
1926 	CHECK_ERROR();
1927 	CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
1928 	ADJUST_LOCAL_OFFSET(src, srcw);
1929 
1930 	switch (op) {
1931 	case SLJIT_FAST_RETURN:
1932 		if (FAST_IS_REG(src))
1933 			FAIL_IF(push_inst(compiler, ADDI | RD(RETURN_ADDR_REG) | RS1(src) | IMM_I(0)));
1934 		else
1935 			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, RETURN_ADDR_REG, src, srcw));
1936 
1937 		return push_inst(compiler, JALR | RD(TMP_ZERO) | RS1(RETURN_ADDR_REG) | IMM_I(0));
1938 	case SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN:
1939 		return SLJIT_SUCCESS;
1940 	case SLJIT_PREFETCH_L1:
1941 	case SLJIT_PREFETCH_L2:
1942 	case SLJIT_PREFETCH_L3:
1943 	case SLJIT_PREFETCH_ONCE:
1944 		return SLJIT_SUCCESS;
1945 	}
1946 
1947 	return SLJIT_SUCCESS;
1948 }
1949 
1950 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 reg)
1951 {
1952 	CHECK_REG_INDEX(check_sljit_get_register_index(reg));
1953 	return reg_map[reg];
1954 }
1955 
1956 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_float_register_index(sljit_s32 reg)
1957 {
1958 	CHECK_REG_INDEX(check_sljit_get_float_register_index(reg));
1959 	return freg_map[reg];
1960 }
1961 
1962 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler,
1963 	void *instruction, sljit_u32 size)
1964 {
1965 	CHECK_ERROR();
1966 	CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
1967 
1968 	return push_inst(compiler, *(sljit_ins*)instruction);
1969 }
1970 
1971 /* --------------------------------------------------------------------- */
1972 /*  Floating point operators                                             */
1973 /* --------------------------------------------------------------------- */
1974 
1975 #define FLOAT_DATA(op) (DOUBLE_DATA | ((op & SLJIT_32) >> 7))
1976 #define FMT(op) ((sljit_ins)((op & SLJIT_32) ^ SLJIT_32) << 17)
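/* FMT sets the two-bit floating point format field (bits 26:25): it is left zero
   (single precision) when SLJIT_32 is set and becomes the double precision
   encoding otherwise. */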
1977 
1978 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op,
1979 	sljit_s32 dst, sljit_sw dstw,
1980 	sljit_s32 src, sljit_sw srcw)
1981 {
1982 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1983 #	define flags (sljit_u32)0
1984 #else
1985 	sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_SW_FROM_F64)) << 21;
1986 #endif
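	/* On 64-bit targets, bit 21 switches the FCVT integer operand width from
	   W (32 bit) to L (64 bit). */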
1987 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
1988 
1989 	if (src & SLJIT_MEM) {
1990 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src, srcw, dst, dstw));
1991 		src = TMP_FREG1;
1992 	}
1993 
1994 	FAIL_IF(push_inst(compiler, FCVT_W_S | FMT(op) | flags | RD(dst_r) | FRS1(src)));
1995 
1996 	/* Store the integer value from a floating point register. */
1997 	if (dst & SLJIT_MEM) {
1998 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
1999 		return emit_op_mem2(compiler, WORD_DATA, TMP_REG2, dst, dstw, 0, 0);
2000 #else
2001 		return emit_op_mem2(compiler, flags ? WORD_DATA : INT_DATA, TMP_REG2, dst, dstw, 0, 0);
2002 #endif
2003 	}
2004 	return SLJIT_SUCCESS;
2005 
2006 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2007 #	undef flags
2008 #endif
2009 }
2010 
2011 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op,
2012 	sljit_s32 dst, sljit_sw dstw,
2013 	sljit_s32 src, sljit_sw srcw)
2014 {
2015 	sljit_ins inst;
2016 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
2017 	sljit_u32 flags = ((sljit_u32)(GET_OPCODE(op) == SLJIT_CONV_F64_FROM_SW)) << 21;
2018 #endif
2019 
2020 	sljit_s32 dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2021 
2022 	if (src & SLJIT_MEM) {
2023 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2024 		FAIL_IF(emit_op_mem2(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
2025 #else
2026 		FAIL_IF(emit_op_mem2(compiler, (flags ? WORD_DATA : INT_DATA) | LOAD_DATA, TMP_REG1, src, srcw, dst, dstw));
2027 #endif
2028 		src = TMP_REG1;
2029 	} else if (src & SLJIT_IMM) {
2030 #if (defined SLJIT_CONFIG_RISCV_64 && SLJIT_CONFIG_RISCV_64)
2031 		if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_S32)
2032 			srcw = (sljit_s32)srcw;
2033 #endif
2034 
2035 		FAIL_IF(load_immediate(compiler, TMP_REG1, srcw, TMP_REG3));
2036 		src = TMP_REG1;
2037 	}
2038 
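	/* The rounding mode field (F3(0x7), dynamic rounding) is only set when the
	   conversion can be inexact; converting an s32 to an f64 is always exact, so
	   that case keeps the default encoding. */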
2039 	inst = FCVT_S_W | FMT(op) | FRD(dst_r) | RS1(src);
2040 
2041 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2042 	if (op & SLJIT_32)
2043 		inst |= F3(0x7);
2044 #else
2045 	inst |= flags;
2046 
2047 	if (op != SLJIT_CONV_F64_FROM_S32)
2048 		inst |= F3(0x7);
2049 #endif
2050 
2051 	FAIL_IF(push_inst(compiler, inst));
2052 
2053 	if (dst & SLJIT_MEM)
2054 		return emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG1, dst, dstw, 0, 0);
2055 	return SLJIT_SUCCESS;
2056 }
2057 
2058 static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op,
2059 	sljit_s32 src1, sljit_sw src1w,
2060 	sljit_s32 src2, sljit_sw src2w)
2061 {
2062 	sljit_ins inst;
2063 
2064 	if (src1 & SLJIT_MEM) {
2065 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
2066 		src1 = TMP_FREG1;
2067 	}
2068 
2069 	if (src2 & SLJIT_MEM) {
2070 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, 0, 0));
2071 		src2 = TMP_FREG2;
2072 	}
2073 
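	/* FEQ/FLT/FLE write 0 when either operand is NaN, so each condition and its
	   negation share a single comparison whose result is later tested against
	   zero (the negated variant is handled by inverting that test). ORDERED and
	   UNORDERED are checked by adding the operands and testing the sum for NaN
	   with a self-comparison. */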
2074 	switch (GET_FLAG_TYPE(op)) {
2075 	case SLJIT_F_EQUAL:
2076 	case SLJIT_F_NOT_EQUAL:
2077 	case SLJIT_ORDERED_EQUAL:
2078 	case SLJIT_UNORDERED_OR_NOT_EQUAL:
2079 		inst = FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
2080 		break;
2081 	case SLJIT_F_LESS:
2082 	case SLJIT_F_GREATER_EQUAL:
2083 	case SLJIT_ORDERED_LESS:
2084 	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2085 		inst = FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
2086 		break;
2087 	case SLJIT_ORDERED_GREATER:
2088 	case SLJIT_UNORDERED_OR_LESS_EQUAL:
2089 		inst = FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src2) | FRS2(src1);
2090 		break;
2091 	case SLJIT_F_GREATER:
2092 	case SLJIT_F_LESS_EQUAL:
2093 	case SLJIT_UNORDERED_OR_GREATER:
2094 	case SLJIT_ORDERED_LESS_EQUAL:
2095 		inst = FLE_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2);
2096 		break;
2097 	case SLJIT_UNORDERED_OR_LESS:
2098 	case SLJIT_ORDERED_GREATER_EQUAL:
2099 		inst = FLE_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src2) | FRS2(src1);
2100 		break;
2101 	case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
2102 	case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
2103 		FAIL_IF(push_inst(compiler, FLT_S | FMT(op) | RD(OTHER_FLAG) | FRS1(src1) | FRS2(src2)));
2104 		FAIL_IF(push_inst(compiler, FLT_S | FMT(op) | RD(TMP_REG1) | FRS1(src2) | FRS2(src1)));
2105 		inst = OR | RD(OTHER_FLAG) | RS1(OTHER_FLAG) | RS2(TMP_REG1);
2106 		break;
2107 	default: /* SLJIT_UNORDERED, SLJIT_ORDERED */
2108 		FAIL_IF(push_inst(compiler, FADD_S | FMT(op) | FRD(TMP_FREG1) | FRS1(src1) | FRS2(src2)));
2109 		inst = FEQ_S | FMT(op) | RD(OTHER_FLAG) | FRS1(TMP_FREG1) | FRS2(TMP_FREG1);
2110 		break;
2111 	}
2112 
2113 	return push_inst(compiler, inst);
2114 }
2115 
2116 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op,
2117 	sljit_s32 dst, sljit_sw dstw,
2118 	sljit_s32 src, sljit_sw srcw)
2119 {
2120 	sljit_s32 dst_r;
2121 
2122 	CHECK_ERROR();
2123 	compiler->cache_arg = 0;
2124 	compiler->cache_argw = 0;
2125 
2126 	SLJIT_COMPILE_ASSERT((SLJIT_32 == 0x100) && !(DOUBLE_DATA & 0x2), float_transfer_bit_error);
2127 	SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
2128 
2129 	if (GET_OPCODE(op) == SLJIT_CONV_F64_FROM_F32)
2130 		op ^= SLJIT_32;
2131 
2132 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG1;
2133 
2134 	if (src & SLJIT_MEM) {
2135 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op) | LOAD_DATA, dst_r, src, srcw, dst, dstw));
2136 		src = dst_r;
2137 	}
2138 
2139 	switch (GET_OPCODE(op)) {
2140 	case SLJIT_MOV_F64:
2141 		if (src != dst_r) {
2142 			if (dst_r != TMP_FREG1)
2143 				FAIL_IF(push_inst(compiler, FSGNJ_S | FMT(op) | FRD(dst_r) | FRS1(src) | FRS2(src)));
2144 			else
2145 				dst_r = src;
2146 		}
2147 		break;
2148 	case SLJIT_NEG_F64:
2149 		FAIL_IF(push_inst(compiler, FSGNJN_S | FMT(op) | FRD(dst_r) | FRS1(src) | FRS2(src)));
2150 		break;
2151 	case SLJIT_ABS_F64:
2152 		FAIL_IF(push_inst(compiler, FSGNJX_S | FMT(op) | FRD(dst_r) | FRS1(src) | FRS2(src)));
2153 		break;
2154 	case SLJIT_CONV_F64_FROM_F32:
2155 		/* The SLJIT_32 bit is inverted because sljit_f32 needs to be loaded from memory. */
2156 		FAIL_IF(push_inst(compiler, FCVT_S_D | ((op & SLJIT_32) ? (1 << 25) : ((1 << 20) | F3(7))) | FRD(dst_r) | FRS1(src)));
2157 		op ^= SLJIT_32;
2158 		break;
2159 	}
2160 
2161 	if (dst & SLJIT_MEM)
2162 		return emit_op_mem2(compiler, FLOAT_DATA(op), dst_r, dst, dstw, 0, 0);
2163 	return SLJIT_SUCCESS;
2164 }
2165 
2166 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op,
2167 	sljit_s32 dst, sljit_sw dstw,
2168 	sljit_s32 src1, sljit_sw src1w,
2169 	sljit_s32 src2, sljit_sw src2w)
2170 {
2171 	sljit_s32 dst_r, flags = 0;
2172 
2173 	CHECK_ERROR();
2174 	CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
2175 	ADJUST_LOCAL_OFFSET(dst, dstw);
2176 	ADJUST_LOCAL_OFFSET(src1, src1w);
2177 	ADJUST_LOCAL_OFFSET(src2, src2w);
2178 
2179 	compiler->cache_arg = 0;
2180 	compiler->cache_argw = 0;
2181 
2182 	dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG2;
2183 
2184 	if (src1 & SLJIT_MEM) {
2185 		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w)) {
2186 			FAIL_IF(compiler->error);
2187 			src1 = TMP_FREG1;
2188 		} else
2189 			flags |= SLOW_SRC1;
2190 	}
2191 
2192 	if (src2 & SLJIT_MEM) {
2193 		if (getput_arg_fast(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w)) {
2194 			FAIL_IF(compiler->error);
2195 			src2 = TMP_FREG2;
2196 		} else
2197 			flags |= SLOW_SRC2;
2198 	}
2199 
2200 	if ((flags & (SLOW_SRC1 | SLOW_SRC2)) == (SLOW_SRC1 | SLOW_SRC2)) {
2201 		if (!can_cache(src1, src1w, src2, src2w) && can_cache(src1, src1w, dst, dstw)) {
2202 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, src1, src1w));
2203 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
2204 		}
2205 		else {
2206 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, src2, src2w));
2207 			FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
2208 		}
2209 	}
2210 	else if (flags & SLOW_SRC1)
2211 		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG1, src1, src1w, dst, dstw));
2212 	else if (flags & SLOW_SRC2)
2213 		FAIL_IF(getput_arg(compiler, FLOAT_DATA(op) | LOAD_DATA, TMP_FREG2, src2, src2w, dst, dstw));
2214 
2215 	if (flags & SLOW_SRC1)
2216 		src1 = TMP_FREG1;
2217 	if (flags & SLOW_SRC2)
2218 		src2 = TMP_FREG2;
2219 
2220 	switch (GET_OPCODE(op)) {
2221 	case SLJIT_ADD_F64:
2222 		FAIL_IF(push_inst(compiler, FADD_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2)));
2223 		break;
2224 
2225 	case SLJIT_SUB_F64:
2226 		FAIL_IF(push_inst(compiler, FSUB_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2)));
2227 		break;
2228 
2229 	case SLJIT_MUL_F64:
2230 		FAIL_IF(push_inst(compiler, FMUL_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2)));
2231 		break;
2232 
2233 	case SLJIT_DIV_F64:
2234 		FAIL_IF(push_inst(compiler, FDIV_S | FMT(op) | FRD(dst_r) | FRS1(src1) | FRS2(src2)));
2235 		break;
2236 	}
2237 
2238 	if (dst_r == TMP_FREG2)
2239 		FAIL_IF(emit_op_mem2(compiler, FLOAT_DATA(op), TMP_FREG2, dst, dstw, 0, 0));
2240 
2241 	return SLJIT_SUCCESS;
2242 }
2243 
2244 #undef FLOAT_DATA
2245 #undef FMT
2246 
2247 /* --------------------------------------------------------------------- */
2248 /*  Other instructions                                                   */
2249 /* --------------------------------------------------------------------- */
2250 
2251 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2252 {
2253 	CHECK_ERROR();
2254 	CHECK(check_sljit_emit_fast_enter(compiler, dst, dstw));
2255 	ADJUST_LOCAL_OFFSET(dst, dstw);
2256 
2257 	if (FAST_IS_REG(dst))
2258 		return push_inst(compiler, ADDI | RD(dst) | RS1(RETURN_ADDR_REG) | IMM_I(0));
2259 
2260 	/* Memory. */
2261 	return emit_op_mem(compiler, WORD_DATA, RETURN_ADDR_REG, dst, dstw);
2262 }
2263 
2264 /* --------------------------------------------------------------------- */
2265 /*  Conditional instructions                                             */
2266 /* --------------------------------------------------------------------- */
2267 
2268 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler)
2269 {
2270 	struct sljit_label *label;
2271 
2272 	CHECK_ERROR_PTR();
2273 	CHECK_PTR(check_sljit_emit_label(compiler));
2274 
2275 	if (compiler->last_label && compiler->last_label->size == compiler->size)
2276 		return compiler->last_label;
2277 
2278 	label = (struct sljit_label*)ensure_abuf(compiler, sizeof(struct sljit_label));
2279 	PTR_FAIL_IF(!label);
2280 	set_label(label, compiler);
2281 	return label;
2282 }
2283 
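/* Encoded branch offset of the inverted conditional branch emitted before a jump:
   it skips the JALR below plus the words reserved for loading the target address
   (1 extra word on RISC-V-32, 5 on RISC-V-64). */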
2284 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2285 #define BRANCH_LENGTH	((sljit_ins)(3 * sizeof(sljit_ins)) << 7)
2286 #else
2287 #define BRANCH_LENGTH	((sljit_ins)(7 * sizeof(sljit_ins)) << 7)
2288 #endif
2289 
2290 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)
2291 {
2292 	struct sljit_jump *jump;
2293 	sljit_ins inst;
2294 
2295 	CHECK_ERROR_PTR();
2296 	CHECK_PTR(check_sljit_emit_jump(compiler, type));
2297 
2298 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2299 	PTR_FAIL_IF(!jump);
2300 	set_jump(jump, compiler, type & SLJIT_REWRITABLE_JUMP);
2301 	type &= 0xff;
2302 
2303 	switch (type) {
2304 	case SLJIT_EQUAL:
2305 		inst = BNE | RS1(EQUAL_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
2306 		break;
2307 	case SLJIT_NOT_EQUAL:
2308 		inst = BEQ | RS1(EQUAL_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
2309 		break;
2310 	case SLJIT_LESS:
2311 	case SLJIT_GREATER:
2312 	case SLJIT_SIG_LESS:
2313 	case SLJIT_SIG_GREATER:
2314 	case SLJIT_OVERFLOW:
2315 	case SLJIT_CARRY:
2316 	case SLJIT_F_EQUAL:
2317 	case SLJIT_ORDERED_EQUAL:
2318 	case SLJIT_ORDERED_NOT_EQUAL: /* Not supported. */
2319 	case SLJIT_F_LESS:
2320 	case SLJIT_ORDERED_LESS:
2321 	case SLJIT_ORDERED_GREATER:
2322 	case SLJIT_F_LESS_EQUAL:
2323 	case SLJIT_ORDERED_LESS_EQUAL:
2324 	case SLJIT_ORDERED_GREATER_EQUAL:
2325 	case SLJIT_ORDERED:
2326 		inst = BEQ | RS1(OTHER_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
2327 		break;
2328 	case SLJIT_GREATER_EQUAL:
2329 	case SLJIT_LESS_EQUAL:
2330 	case SLJIT_SIG_GREATER_EQUAL:
2331 	case SLJIT_SIG_LESS_EQUAL:
2332 	case SLJIT_NOT_OVERFLOW:
2333 	case SLJIT_NOT_CARRY:
2334 	case SLJIT_F_NOT_EQUAL:
2335 	case SLJIT_UNORDERED_OR_NOT_EQUAL:
2336 	case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
2337 	case SLJIT_F_GREATER_EQUAL:
2338 	case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2339 	case SLJIT_UNORDERED_OR_LESS_EQUAL:
2340 	case SLJIT_F_GREATER:
2341 	case SLJIT_UNORDERED_OR_GREATER:
2342 	case SLJIT_UNORDERED_OR_LESS:
2343 	case SLJIT_UNORDERED:
2344 		inst = BNE | RS1(OTHER_FLAG) | RS2(TMP_ZERO) | BRANCH_LENGTH;
2345 		break;
2346 	default:
2347 		/* Not a conditional branch. */
2348 		inst = 0;
2349 		break;
2350 	}
2351 
2352 	if (inst != 0) {
2353 		PTR_FAIL_IF(push_inst(compiler, inst));
2354 		jump->flags |= IS_COND;
2355 	}
2356 
2357 	jump->addr = compiler->size;
2358 	inst = JALR | RS1(TMP_REG1) | IMM_I(0);
2359 
2360 	if (type >= SLJIT_FAST_CALL) {
2361 		jump->flags |= IS_CALL;
2362 		inst |= RD(RETURN_ADDR_REG);
2363 	}
2364 
2365 	PTR_FAIL_IF(push_inst(compiler, inst));
2366 
2367 	/* Maximum number of instructions required for generating a constant. */
2368 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2369 	compiler->size += 1;
2370 #else
2371 	compiler->size += 5;
2372 #endif
2373 	return jump;
2374 }
2375 
2376 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_call(struct sljit_compiler *compiler, sljit_s32 type,
2377 	sljit_s32 arg_types)
2378 {
2379 	SLJIT_UNUSED_ARG(arg_types);
2380 	CHECK_ERROR_PTR();
2381 	CHECK_PTR(check_sljit_emit_call(compiler, type, arg_types));
2382 
2383 	if (type & SLJIT_CALL_RETURN) {
2384 		PTR_FAIL_IF(emit_stack_frame_release(compiler, 0));
2385 		type = SLJIT_JUMP | (type & SLJIT_REWRITABLE_JUMP);
2386 	}
2387 
2388 	SLJIT_SKIP_CHECKS(compiler);
2389 	return sljit_emit_jump(compiler, type);
2390 }
2391 
2392 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_cmp(struct sljit_compiler *compiler, sljit_s32 type,
2393 	sljit_s32 src1, sljit_sw src1w,
2394 	sljit_s32 src2, sljit_sw src2w)
2395 {
2396 	struct sljit_jump *jump;
2397 	sljit_s32 flags;
2398 	sljit_ins inst;
2399 
2400 	CHECK_ERROR_PTR();
2401 	CHECK_PTR(check_sljit_emit_cmp(compiler, type, src1, src1w, src2, src2w));
2402 	ADJUST_LOCAL_OFFSET(src1, src1w);
2403 	ADJUST_LOCAL_OFFSET(src2, src2w);
2404 
2405 	compiler->cache_arg = 0;
2406 	compiler->cache_argw = 0;
2407 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2408 	flags = WORD_DATA | LOAD_DATA;
2409 #else /* !SLJIT_CONFIG_RISCV_32 */
2410 	flags = ((type & SLJIT_32) ? INT_DATA : WORD_DATA) | LOAD_DATA;
2411 #endif /* SLJIT_CONFIG_RISCV_32 */
2412 
2413 	if (src1 & SLJIT_MEM) {
2414 		PTR_FAIL_IF(emit_op_mem2(compiler, flags, TMP_REG1, src1, src1w, src2, src2w));
2415 		src1 = TMP_REG1;
2416 	}
2417 
2418 	if (src2 & SLJIT_MEM) {
2419 		PTR_FAIL_IF(emit_op_mem2(compiler, flags, TMP_REG2, src2, src2w, 0, 0));
2420 		src2 = TMP_REG2;
2421 	}
2422 
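	/* An immediate zero operand is compared through the hardwired zero register
	   instead of loading a constant. */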
2423 	if (src1 & SLJIT_IMM) {
2424 		if (src1w != 0) {
2425 			PTR_FAIL_IF(load_immediate(compiler, TMP_REG1, src1w, TMP_REG3));
2426 			src1 = TMP_REG1;
2427 		}
2428 		else
2429 			src1 = TMP_ZERO;
2430 	}
2431 
2432 	if (src2 & SLJIT_IMM) {
2433 		if (src2w != 0) {
2434 			PTR_FAIL_IF(load_immediate(compiler, TMP_REG2, src2w, TMP_REG3));
2435 			src2 = TMP_REG2;
2436 		}
2437 		else
2438 			src2 = TMP_ZERO;
2439 	}
2440 
2441 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2442 	PTR_FAIL_IF(!jump);
2443 	set_jump(jump, compiler, (sljit_u32)((type & SLJIT_REWRITABLE_JUMP) | IS_COND));
2444 	type &= 0xff;
2445 
2446 	switch (type) {
2447 	case SLJIT_EQUAL:
2448 		inst = BNE | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2449 		break;
2450 	case SLJIT_NOT_EQUAL:
2451 		inst = BEQ | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2452 		break;
2453 	case SLJIT_LESS:
2454 		inst = BGEU | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2455 		break;
2456 	case SLJIT_GREATER_EQUAL:
2457 		inst = BLTU | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2458 		break;
2459 	case SLJIT_GREATER:
2460 		inst = BGEU | RS1(src2) | RS2(src1) | BRANCH_LENGTH;
2461 		break;
2462 	case SLJIT_LESS_EQUAL:
2463 		inst = BLTU | RS1(src2) | RS2(src1) | BRANCH_LENGTH;
2464 		break;
2465 	case SLJIT_SIG_LESS:
2466 		inst = BGE | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2467 		break;
2468 	case SLJIT_SIG_GREATER_EQUAL:
2469 		inst = BLT | RS1(src1) | RS2(src2) | BRANCH_LENGTH;
2470 		break;
2471 	case SLJIT_SIG_GREATER:
2472 		inst = BGE | RS1(src2) | RS2(src1) | BRANCH_LENGTH;
2473 		break;
2474 	case SLJIT_SIG_LESS_EQUAL:
2475 		inst = BLT | RS1(src2) | RS2(src1) | BRANCH_LENGTH;
2476 		break;
2477 	}
2478 
2479 	PTR_FAIL_IF(push_inst(compiler, inst));
2480 
2481 	jump->addr = compiler->size;
2482 	PTR_FAIL_IF(push_inst(compiler, JALR | RD(TMP_ZERO) | RS1(TMP_REG1) | IMM_I(0)));
2483 
2484 	/* Maximum number of instructions required for generating a constant. */
2485 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2486 	compiler->size += 1;
2487 #else
2488 	compiler->size += 5;
2489 #endif
2490 	return jump;
2491 }
2492 
2493 #undef BRANCH_LENGTH
2494 
2495 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)
2496 {
2497 	struct sljit_jump *jump;
2498 
2499 	CHECK_ERROR();
2500 	CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
2501 
2502 	if (!(src & SLJIT_IMM)) {
2503 		if (src & SLJIT_MEM) {
2504 			ADJUST_LOCAL_OFFSET(src, srcw);
2505 			FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
2506 			src = TMP_REG1;
2507 		}
2508 		return push_inst(compiler, JALR | RD((type >= SLJIT_FAST_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | RS1(src) | IMM_I(0));
2509 	}
2510 
2511 	/* These jumps are converted to jump/call instructions when possible. */
2512 	jump = (struct sljit_jump*)ensure_abuf(compiler, sizeof(struct sljit_jump));
2513 	FAIL_IF(!jump);
2514 	set_jump(jump, compiler, JUMP_ADDR | ((type >= SLJIT_FAST_CALL) ? IS_CALL : 0));
2515 	jump->u.target = (sljit_uw)srcw;
2516 
2517 	jump->addr = compiler->size;
2518 	FAIL_IF(push_inst(compiler, JALR | RD((type >= SLJIT_FAST_CALL) ? RETURN_ADDR_REG : TMP_ZERO) | RS1(TMP_REG1) | IMM_I(0)));
2519 
2520 	/* Maximum number of instructions required for generating a constant. */
2521 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2522 	compiler->size += 1;
2523 #else
2524 	compiler->size += 5;
2525 #endif
2526 	return SLJIT_SUCCESS;
2527 }
2528 
2529 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_icall(struct sljit_compiler *compiler, sljit_s32 type,
2530 	sljit_s32 arg_types,
2531 	sljit_s32 src, sljit_sw srcw)
2532 {
2533 	SLJIT_UNUSED_ARG(arg_types);
2534 	CHECK_ERROR();
2535 	CHECK(check_sljit_emit_icall(compiler, type, arg_types, src, srcw));
2536 
2537 	if (src & SLJIT_MEM) {
2538 		ADJUST_LOCAL_OFFSET(src, srcw);
2539 		FAIL_IF(emit_op_mem(compiler, WORD_DATA | LOAD_DATA, TMP_REG1, src, srcw));
2540 		src = TMP_REG1;
2541 	}
2542 
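	/* For a tail call the stack frame is released first; if the target register is
	   one of the saved registers restored by the epilogue, it is moved to TMP_REG1
	   so that the value survives emit_stack_frame_release. */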
2543 	if (type & SLJIT_CALL_RETURN) {
2544 		if (src >= SLJIT_FIRST_SAVED_REG && src <= (SLJIT_S0 - SLJIT_KEPT_SAVEDS_COUNT(compiler->options))) {
2545 			FAIL_IF(push_inst(compiler, ADDI | RD(TMP_REG1) | RS1(src) | IMM_I(0)));
2546 			src = TMP_REG1;
2547 		}
2548 
2549 		FAIL_IF(emit_stack_frame_release(compiler, 0));
2550 		type = SLJIT_JUMP;
2551 	}
2552 
2553 	SLJIT_SKIP_CHECKS(compiler);
2554 	return sljit_emit_ijump(compiler, type, src, srcw);
2555 }
2556 
2557 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op,
2558 	sljit_s32 dst, sljit_sw dstw,
2559 	sljit_s32 type)
2560 {
2561 	sljit_s32 src_r, dst_r, invert;
2562 	sljit_s32 saved_op = op;
2563 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2564 	sljit_s32 mem_type = WORD_DATA;
2565 #else
2566 	sljit_s32 mem_type = ((op & SLJIT_32) || op == SLJIT_MOV32) ? (INT_DATA | SIGNED_DATA) : WORD_DATA;
2567 #endif
2568 
2569 	CHECK_ERROR();
2570 	CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
2571 	ADJUST_LOCAL_OFFSET(dst, dstw);
2572 
2573 	op = GET_OPCODE(op);
2574 	dst_r = (op < SLJIT_ADD && FAST_IS_REG(dst)) ? dst : TMP_REG2;
2575 
2576 	compiler->cache_arg = 0;
2577 	compiler->cache_argw = 0;
2578 
2579 	if (op >= SLJIT_ADD && (dst & SLJIT_MEM))
2580 		FAIL_IF(emit_op_mem2(compiler, mem_type | LOAD_DATA, TMP_REG1, dst, dstw, dst, dstw));
2581 
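	/* Condition codes come in pairs where the odd value is the negation of the
	   even one, hence invert = type & 0x1. EQUAL/NOT_EQUAL (and OVERFLOW when no
	   add/sub flags are live) first convert the flag register into a 0/1 value
	   with an unsigned "less than 1" comparison. */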
2582 	if (type < SLJIT_F_EQUAL) {
2583 		src_r = OTHER_FLAG;
2584 		invert = type & 0x1;
2585 
2586 		switch (type) {
2587 		case SLJIT_EQUAL:
2588 		case SLJIT_NOT_EQUAL:
2589 			FAIL_IF(push_inst(compiler, SLTUI | RD(dst_r) | RS1(EQUAL_FLAG) | IMM_I(1)));
2590 			src_r = dst_r;
2591 			break;
2592 		case SLJIT_OVERFLOW:
2593 		case SLJIT_NOT_OVERFLOW:
2594 			if (compiler->status_flags_state & (SLJIT_CURRENT_FLAGS_ADD | SLJIT_CURRENT_FLAGS_SUB)) {
2595 				src_r = OTHER_FLAG;
2596 				break;
2597 			}
2598 			FAIL_IF(push_inst(compiler, SLTUI | RD(dst_r) | RS1(OTHER_FLAG) | IMM_I(1)));
2599 			src_r = dst_r;
2600 			invert ^= 0x1;
2601 			break;
2602 		}
2603 	} else {
2604 		invert = 0;
2605 		src_r = OTHER_FLAG;
2606 
2607 		switch (type) {
2608 		case SLJIT_F_NOT_EQUAL:
2609 		case SLJIT_UNORDERED_OR_NOT_EQUAL:
2610 		case SLJIT_UNORDERED_OR_EQUAL: /* Not supported. */
2611 		case SLJIT_F_GREATER_EQUAL:
2612 		case SLJIT_UNORDERED_OR_GREATER_EQUAL:
2613 		case SLJIT_UNORDERED_OR_LESS_EQUAL:
2614 		case SLJIT_F_GREATER:
2615 		case SLJIT_UNORDERED_OR_GREATER:
2616 		case SLJIT_UNORDERED_OR_LESS:
2617 		case SLJIT_UNORDERED:
2618 			invert = 1;
2619 			break;
2620 		}
2621 	}
2622 
2623 	if (invert) {
2624 		FAIL_IF(push_inst(compiler, XORI | RD(dst_r) | RS1(src_r) | IMM_I(1)));
2625 		src_r = dst_r;
2626 	}
2627 
2628 	if (op < SLJIT_ADD) {
2629 		if (dst & SLJIT_MEM)
2630 			return emit_op_mem(compiler, mem_type, src_r, dst, dstw);
2631 
2632 		if (src_r != dst_r)
2633 			return push_inst(compiler, ADDI | RD(dst_r) | RS1(src_r) | IMM_I(0));
2634 		return SLJIT_SUCCESS;
2635 	}
2636 
2637 	mem_type |= CUMULATIVE_OP | IMM_OP | ALT_KEEP_CACHE;
2638 
2639 	if (dst & SLJIT_MEM)
2640 		return emit_op(compiler, saved_op, mem_type, dst, dstw, TMP_REG1, 0, src_r, 0);
2641 	return emit_op(compiler, saved_op, mem_type, dst, dstw, dst, dstw, src_r, 0);
2642 }
2643 
2644 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_cmov(struct sljit_compiler *compiler, sljit_s32 type,
2645 	sljit_s32 dst_reg,
2646 	sljit_s32 src, sljit_sw srcw)
2647 {
2648 	CHECK_ERROR();
2649 	CHECK(check_sljit_emit_cmov(compiler, type, dst_reg, src, srcw));
2650 
2651 	return sljit_emit_cmov_generic(compiler, type, dst_reg, src, srcw);
2652 }
2653 
2654 SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_mem(struct sljit_compiler *compiler, sljit_s32 type,
2655 	sljit_s32 reg,
2656 	sljit_s32 mem, sljit_sw memw)
2657 {
2658 	sljit_s32 flags;
2659 
2660 	CHECK_ERROR();
2661 	CHECK(check_sljit_emit_mem(compiler, type, reg, mem, memw));
2662 
2663 	if (!(reg & REG_PAIR_MASK))
2664 		return sljit_emit_mem_unaligned(compiler, type, reg, mem, memw);
2665 
2666 	if (SLJIT_UNLIKELY(mem & OFFS_REG_MASK)) {
2667 		memw &= 0x3;
2668 
2669 		if (SLJIT_UNLIKELY(memw != 0)) {
2670 			FAIL_IF(push_inst(compiler, SLLI | RD(TMP_REG1) | RS1(OFFS_REG(mem)) | IMM_I(memw)));
2671 			FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RS1(TMP_REG1) | RS2(mem & REG_MASK)));
2672 		} else
2673 			FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RS1(mem & REG_MASK) | RS2(OFFS_REG(mem))));
2674 
2675 		mem = TMP_REG1;
2676 		memw = 0;
2677 	} else if (memw > SIMM_MAX - SSIZE_OF(sw) || memw < SIMM_MIN) {
2678 		if (((memw + 0x800) & 0xfff) <= 0xfff - SSIZE_OF(sw)) {
2679 			FAIL_IF(load_immediate(compiler, TMP_REG1, TO_ARGW_HI(memw), TMP_REG3));
2680 			memw &= 0xfff;
2681 		} else {
2682 			FAIL_IF(load_immediate(compiler, TMP_REG1, memw, TMP_REG3));
2683 			memw = 0;
2684 		}
2685 
2686 		if (mem & REG_MASK)
2687 			FAIL_IF(push_inst(compiler, ADD | RD(TMP_REG1) | RS1(TMP_REG1) | RS2(mem & REG_MASK)));
2688 
2689 		mem = TMP_REG1;
2690 	} else {
2691 		mem &= REG_MASK;
2692 		memw &= 0xfff;
2693 	}
2694 
2695 	SLJIT_ASSERT((memw >= 0 && memw <= SIMM_MAX - SSIZE_OF(sw)) || (memw > SIMM_MAX && memw <= 0xfff));
2696 
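	/* When loading a register pair whose first register is also the base address,
	   the second half is loaded first so that the base is not overwritten before
	   the second access. */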
2697 	if (!(type & SLJIT_MEM_STORE) && mem == REG_PAIR_FIRST(reg)) {
2698 		FAIL_IF(push_mem_inst(compiler, WORD_DATA | LOAD_DATA, REG_PAIR_SECOND(reg), mem, (memw + SSIZE_OF(sw)) & 0xfff));
2699 		return push_mem_inst(compiler, WORD_DATA | LOAD_DATA, REG_PAIR_FIRST(reg), mem, memw);
2700 	}
2701 
2702 	flags = WORD_DATA | (!(type & SLJIT_MEM_STORE) ? LOAD_DATA : 0);
2703 
2704 	FAIL_IF(push_mem_inst(compiler, flags, REG_PAIR_FIRST(reg), mem, memw));
2705 	return push_mem_inst(compiler, flags, REG_PAIR_SECOND(reg), mem, (memw + SSIZE_OF(sw)) & 0xfff);
2706 }
2707 
2708 #undef TO_ARGW_HI
2709 
2710 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)
2711 {
2712 	struct sljit_const *const_;
2713 	sljit_s32 dst_r;
2714 
2715 	CHECK_ERROR_PTR();
2716 	CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
2717 	ADJUST_LOCAL_OFFSET(dst, dstw);
2718 
2719 	const_ = (struct sljit_const*)ensure_abuf(compiler, sizeof(struct sljit_const));
2720 	PTR_FAIL_IF(!const_);
2721 	set_const(const_, compiler);
2722 
2723 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
2724 	PTR_FAIL_IF(emit_const(compiler, dst_r, init_value, ADDI | RD(dst_r)));
2725 
2726 	if (dst & SLJIT_MEM)
2727 		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
2728 
2729 	return const_;
2730 }
2731 
2732 SLJIT_API_FUNC_ATTRIBUTE struct sljit_put_label* sljit_emit_put_label(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)
2733 {
2734 	struct sljit_put_label *put_label;
2735 	sljit_s32 dst_r;
2736 
2737 	CHECK_ERROR_PTR();
2738 	CHECK_PTR(check_sljit_emit_put_label(compiler, dst, dstw));
2739 	ADJUST_LOCAL_OFFSET(dst, dstw);
2740 
2741 	put_label = (struct sljit_put_label*)ensure_abuf(compiler, sizeof(struct sljit_put_label));
2742 	PTR_FAIL_IF(!put_label);
2743 	set_put_label(put_label, compiler, 0);
2744 
2745 	dst_r = FAST_IS_REG(dst) ? dst : TMP_REG2;
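	/* A placeholder word holding the destination register index is emitted here;
	   the final code generation pass is expected to rewrite it, together with the
	   reserved words below, with the sequence that loads the label address. */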
2746 	PTR_FAIL_IF(push_inst(compiler, (sljit_ins)dst_r));
2747 #if (defined SLJIT_CONFIG_RISCV_32 && SLJIT_CONFIG_RISCV_32)
2748 	compiler->size += 1;
2749 #else
2750 	compiler->size += 5;
2751 #endif
2752 
2753 	if (dst & SLJIT_MEM)
2754 		PTR_FAIL_IF(emit_op_mem(compiler, WORD_DATA, TMP_REG2, dst, dstw));
2755 
2756 	return put_label;
2757 }
2758 
2759 SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)
2760 {
2761 	sljit_set_jump_addr(addr, (sljit_uw)new_constant, executable_offset);
2762 }
2763