/third_party/pcre2/pcre2/src/sljit/ |
D | sljitNativeX86_64.c |
    278  static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr)   in generate_far_jump_code() argument
    289  *code_ptr++ = U8(get_jump_code(type ^ 0x1) - 0x10);   in generate_far_jump_code()
    290  *code_ptr++ = short_addr ? (6 + 3) : (10 + 3);   in generate_far_jump_code()
    293  *code_ptr++ = short_addr ? REX_B : (REX_W | REX_B);   in generate_far_jump_code()
    294  *code_ptr++ = MOV_r_i32 | reg_lmap[TMP_REG2];   in generate_far_jump_code()
    295  jump->addr = (sljit_uw)code_ptr;   in generate_far_jump_code()
    300  sljit_unaligned_store_s32(code_ptr, (sljit_s32)jump->u.target);   in generate_far_jump_code()
    302  sljit_unaligned_store_sw(code_ptr, (sljit_sw)jump->u.target);   in generate_far_jump_code()
    304  code_ptr += short_addr ? sizeof(sljit_s32) : sizeof(sljit_sw);   in generate_far_jump_code()
    306  *code_ptr++ = REX_B;   in generate_far_jump_code()
    [all …]
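The matches above show the byte-cursor idiom these generators share: each `*code_ptr++ = …` appends one opcode or prefix byte, the jump target is then written through an unaligned 32- or 64-bit store, and the cursor is advanced past it. A minimal, self-contained sketch of that pattern, assuming a plain rel32 jump; the function and constant names are illustrative, not sljit's:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical sketch of the byte-cursor pattern in generate_far_jump_code():
 * append an opcode byte, store an unaligned 32-bit displacement, advance. */
#define OPCODE_JMP_REL32 0xE9  /* x86 "JMP rel32" opcode */

static uint8_t *emit_jmp_rel32(uint8_t *code_ptr, int32_t displacement)
{
    *code_ptr++ = OPCODE_JMP_REL32;                         /* one opcode byte */
    memcpy(code_ptr, &displacement, sizeof(displacement));  /* unaligned store */
    return code_ptr + sizeof(displacement);                 /* cursor now past the jump */
}

int main(void)
{
    uint8_t buf[16];
    uint8_t *end = emit_jmp_rel32(buf, 0x12345678);

    for (uint8_t *p = buf; p < end; p++)
        printf("%02x ", *p);   /* prints "e9 78 56 34 12" on a little-endian host */
    printf("\n");
    return 0;
}
```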
|
D | sljitNativePPC_common.c |
    264  static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_…   in detect_jump_type() argument
    290  diff = ((sljit_sw)target_addr - (sljit_sw)(code_ptr) - executable_offset) & ~0x3l;   in detect_jump_type()
    388  sljit_ins *code_ptr;   in sljit_generate_code() local
    416  code_ptr = code;   in sljit_generate_code()
    430  *code_ptr = *buf_ptr++;   in sljit_generate_code()
    440  label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    441  label->size = (sljit_uw)(code_ptr - code);   in sljit_generate_code()
    446  jump->addr = (sljit_uw)(code_ptr - 3);   in sljit_generate_code()
    448  jump->addr = (sljit_uw)(code_ptr - 6);   in sljit_generate_code()
    450  if (detect_jump_type(jump, code_ptr, code, executable_offset)) {   in sljit_generate_code()
    [all …]
|
D | sljitNativeARM_32.c |
    287  static sljit_uw patch_pc_relative_loads(sljit_uw *last_pc_patch, sljit_uw *code_ptr, sljit_uw* cons…   in patch_pc_relative_loads() argument
    295  SLJIT_ASSERT(const_pool - code_ptr <= CONST_POOL_ALIGNMENT);   in patch_pc_relative_loads()
    302  while (last_pc_patch < code_ptr) {   in patch_pc_relative_loads()
    413  static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_uw *code_ptr, sljit_u…   in detect_jump_type() argument
    422  code_ptr--;   in detect_jump_type()
    425  diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset);   in detect_jump_type()
    428  diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2));   in detect_jump_type()
    437  *code_ptr = (BL - CONDITIONAL) | (*(code_ptr + 1) & COND_MASK);   in detect_jump_type()
    444  *code_ptr = (B - CONDITIONAL) | (*code_ptr & COND_MASK);   in detect_jump_type()
    450  diff = ((sljit_sw)jump->u.target - (sljit_sw)code_ptr - executable_offset);   in detect_jump_type()
    [all …]
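The detect_jump_type() matches above measure the signed distance from the jump site (offset by the ARM prefetch, hence `code_ptr + 2` words) to its target and keep the short branch only when that distance fits the instruction's immediate field. A hedged sketch of that range check; the 26-bit window below matches a typical ARM B/BL encoding (24-bit immediate shifted left by 2) and is an assumption, not sljit's exact constant:

```c
#include <stdint.h>
#include <stdio.h>

/* Hypothetical range check: can the branch from `from` to `to` be encoded
 * with a short PC-relative immediate? The +8 models the ARM PC reading two
 * instructions ahead of the branch. */
static int fits_in_branch_imm(intptr_t from, intptr_t to)
{
    intptr_t diff = to - (from + 8);
    return diff >= -(1 << 25) && diff < (1 << 25) && (diff & 0x3) == 0;
}

int main(void)
{
    printf("%d\n", fits_in_branch_imm(0x1000, 0x2000));     /* 1: short branch fits */
    printf("%d\n", fits_in_branch_imm(0x1000, 0x4000000));  /* 0: needs a far jump */
    return 0;
}
```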
|
D | sljitNativeARM_64.c |
    159  static SLJIT_INLINE sljit_sw detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit_i…   in detect_jump_type() argument
    176  diff = (sljit_sw)target_addr - (sljit_sw)(code_ptr + 4) - executable_offset;   in detect_jump_type()
    181  code_ptr[-5] ^= (jump->flags & IS_CBZ) ? (0x1 << 24) : 0x1;   in detect_jump_type()
    196  code_ptr[-5] -= (2 << 5);   in detect_jump_type()
    197  code_ptr[-2] = code_ptr[0];   in detect_jump_type()
    203  code_ptr[-5] -= (1 << 5);   in detect_jump_type()
    205  code_ptr[-1] = code_ptr[0];   in detect_jump_type()
    233  sljit_ins *code_ptr;   in sljit_generate_code() local
    255  code_ptr = code;   in sljit_generate_code()
    269  *code_ptr = *buf_ptr++;   in sljit_generate_code()
    [all …]
|
D | sljitNativeSPARC_common.c |
    222  static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit…   in detect_jump_type() argument
    230  return code_ptr;   in detect_jump_type()
    294  return code_ptr;   in detect_jump_type()
    301  sljit_ins *code_ptr;   in sljit_generate_code() local
    322  code_ptr = code;   in sljit_generate_code()
    336  *code_ptr = *buf_ptr++;   in sljit_generate_code()
    346  label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    347  label->size = (sljit_uw)(code_ptr - code);   in sljit_generate_code()
    352  jump->addr = (sljit_uw)(code_ptr - 3);   in sljit_generate_code()
    354  jump->addr = (sljit_uw)(code_ptr - 6);   in sljit_generate_code()
    [all …]
|
D | sljitNativeX86_common.c |
    435  static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw execu…
    437  static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr);
    438  static sljit_u8* generate_put_label_code(struct sljit_put_label *put_label, sljit_u8 *code_ptr, slj…
    441  static sljit_u8* generate_near_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *cod…   in generate_near_jump_code() argument
    456  return generate_far_jump_code(jump, code_ptr);   in generate_near_jump_code()
    461  *code_ptr++ = JMP_i8;   in generate_near_jump_code()
    463  *code_ptr++ = JMP_i32;   in generate_near_jump_code()
    468  *code_ptr++ = CALL_i32;   in generate_near_jump_code()
    472  *code_ptr++ = U8(get_jump_code(type) - 0x10);   in generate_near_jump_code()
    476  *code_ptr++ = GROUP_0F;   in generate_near_jump_code()
    [all …]
|
D | sljitNativeMIPS_common.c |
    318  static SLJIT_INLINE sljit_ins* detect_jump_type(struct sljit_jump *jump, sljit_ins *code_ptr, sljit…   in detect_jump_type() argument
    327  return code_ptr;   in detect_jump_type()
    330  return code_ptr;   in detect_jump_type()
    447  return code_ptr;   in detect_jump_type()
    451  static __attribute__ ((noinline)) void sljit_cache_flush(void* code, void* code_ptr)   in sljit_cache_flush() argument
    453  SLJIT_CACHE_FLUSH(code, code_ptr);   in sljit_cache_flush()
    510  sljit_ins *code_ptr;   in sljit_generate_code() local
    531  code_ptr = code;   in sljit_generate_code()
    545  *code_ptr = *buf_ptr++;   in sljit_generate_code()
    554  label->addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    [all …]
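The sljit_cache_flush(code, code_ptr) match above exists because, after instructions are written through code_ptr, the instruction cache must be invalidated over the range [code, code_ptr) before the buffer is executed. A minimal sketch of the same idea using the GCC/Clang builtin; the wrapper name is hypothetical, and sljit's real SLJIT_CACHE_FLUSH macro is platform specific:

```c
/* Hypothetical wrapper: flush the instruction cache over a freshly
 * generated code range so it can be safely executed. */
static void flush_generated_code(void *code, void *code_ptr)
{
#if defined(__GNUC__) || defined(__clang__)
    __builtin___clear_cache((char *)code, (char *)code_ptr);
#else
    (void)code;
    (void)code_ptr;  /* a platform-specific flush would go here */
#endif
}

int main(void)
{
    static unsigned char buf[64];  /* stand-in for a freshly written JIT buffer */
    flush_generated_code(buf, buf + sizeof(buf));
    return 0;
}
```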
|
D | sljitNativeARM_T2_32.c |
    250  static SLJIT_INLINE sljit_s32 detect_jump_type(struct sljit_jump *jump, sljit_u16 *code_ptr, sljit_…   in detect_jump_type() argument
    261  diff = ((sljit_sw)jump->u.target - (sljit_sw)(code_ptr + 2) - executable_offset) >> 1;   in detect_jump_type()
    265  diff = ((sljit_sw)(code + jump->u.label->size) - (sljit_sw)(code_ptr + 2)) >> 1;   in detect_jump_type()
    374  sljit_u16 *code_ptr;   in sljit_generate_code() local
    394  code_ptr = code;   in sljit_generate_code()
    408  *code_ptr = *buf_ptr++;   in sljit_generate_code()
    417  label->addr = ((sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset)) | 0x1;   in sljit_generate_code()
    418  label->size = (sljit_uw)(code_ptr - code);   in sljit_generate_code()
    422  jump->addr = (sljit_uw)code_ptr - ((jump->flags & IS_COND) ? 10 : 8);   in sljit_generate_code()
    423  code_ptr -= detect_jump_type(jump, code_ptr, code, executable_offset);   in sljit_generate_code()
    [all …]
|
D | sljitNativeX86_32.c |
    219  static sljit_u8* generate_far_jump_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_sw execu…   in generate_far_jump_code() argument
    224  *code_ptr++ = JMP_i32;   in generate_far_jump_code()
    228  *code_ptr++ = CALL_i32;   in generate_far_jump_code()
    232  *code_ptr++ = GROUP_0F;   in generate_far_jump_code()
    233  *code_ptr++ = get_jump_code(type);   in generate_far_jump_code()
    240  …sljit_unaligned_store_sw(code_ptr, (sljit_sw)(jump->u.target - (jump->addr + 4) - (sljit_uw)execut…   in generate_far_jump_code()
    241  code_ptr += 4;   in generate_far_jump_code()
    243  return code_ptr;   in generate_far_jump_code()
|
D | sljitNativeS390X.c |
    1405  void *code, *code_ptr;   in sljit_generate_code() local
    1482  code_ptr = code;   in sljit_generate_code()
    1498  (sljit_uw)code_ptr + label->size, executable_offset);   in sljit_generate_code()
    1518  source = (sljit_sw)code_ptr;   in sljit_generate_code()
    1542  source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    1549  encode_inst(&code_ptr, lgrl(tmp1, offset & 0xffffffff));   in sljit_generate_code()
    1569  jump->addr = (sljit_uw)code_ptr + 2;   in sljit_generate_code()
    1570  source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    1584  source = (sljit_sw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);   in sljit_generate_code()
    1587  put_label->addr = (sljit_uw)code_ptr;   in sljit_generate_code()
    [all …]
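Across the sljit_generate_code() matches above, code_ptr plays the same role on every backend: it starts at code, instruction words are streamed into it from the compiler's buffers (`*code_ptr = *buf_ptr++`), and labels and jumps record their positions as offsets from code. A simplified, self-contained sketch of that loop, with illustrative types rather than sljit's real structures:

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t ins_t;  /* one machine instruction word (illustrative) */

struct label {
    size_t size;  /* offset of the label from the start of the output, in words */
};

/* Stream `count` instruction words into the output through code_ptr and
 * record where a trailing label lands, mirroring the shape of the
 * sljit_generate_code() matches above. */
static ins_t *copy_chunk(ins_t *code, ins_t *code_ptr,
                         const ins_t *buf_ptr, size_t count,
                         struct label *label_at_end)
{
    while (count--)
        *code_ptr++ = *buf_ptr++;
    if (label_at_end)
        label_at_end->size = (size_t)(code_ptr - code);
    return code_ptr;
}

int main(void)
{
    ins_t code[8];
    const ins_t chunk[3] = { 0x11111111u, 0x22222222u, 0x33333333u };
    struct label end_label;

    ins_t *code_ptr = copy_chunk(code, code, chunk, 3, &end_label);
    printf("emitted %zu words, label at word %zu\n",
           (size_t)(code_ptr - code), end_label.size);
    return 0;
}
```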
|
/third_party/mesa3d/src/amd/vulkan/ |
D | radv_shader.h | 496 uint8_t *code_ptr; member
|
D | radv_shader.c |
    1985  shader->code_ptr = dest_ptr;   in radv_shader_binary_upload()
    1997  shader->code_ptr = dest_ptr;   in radv_shader_binary_upload()
|
/third_party/mesa3d/src/amd/vulkan/layers/ |
D | radv_sqtt_layer.c | 895 memcpy(code, shader->code_ptr, shader->code_size); in radv_add_code_object()
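In the radv matches, code_ptr and code_size form a pointer/size pair: radv_shader_binary_upload() stores the upload destination in code_ptr, and the SQTT layer later copies code_size bytes back out of it. A small sketch of that producer/consumer pattern in host memory, assuming an illustrative struct rather than radv's real radv_shader:

```c
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Illustrative stand-in; the real radv_shader carries many more fields. */
struct shader_blob {
    uint8_t *code_ptr;   /* where the machine code was uploaded */
    size_t   code_size;  /* number of bytes stored at code_ptr */
};

static void upload_code(struct shader_blob *s, const uint8_t *src, size_t size,
                        uint8_t *dest_ptr)
{
    memcpy(dest_ptr, src, size);
    s->code_ptr = dest_ptr;   /* remember the upload location, producer side */
    s->code_size = size;
}

int main(void)
{
    uint8_t backing[16];      /* stand-in for the mapped upload buffer */
    uint8_t copy[16];
    const uint8_t isa[4] = { 0xde, 0xad, 0xbe, 0xef };
    struct shader_blob shader;

    upload_code(&shader, isa, sizeof(isa), backing);
    memcpy(copy, shader.code_ptr, shader.code_size);  /* consumer side, as in the SQTT match */
    printf("copied %zu bytes, first byte 0x%02x\n", shader.code_size, copy[0]);
    return 0;
}
```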
|
/third_party/vk-gl-cts/external/openglcts/modules/gles31/ |
D | es31cArrayOfArraysTests.cpp |
    726  const char* code_ptr = shader_source.c_str();   in compile_shader_and_get_compilation_result() local
    730  context_id.getTestContext().getLog() << tcu::TestLog::KernelSource(code_ptr);   in compile_shader_and_get_compilation_result()
    733  gl.shaderSource(shader_object_id, 1 /* count */, &code_ptr, NULL);   in compile_shader_and_get_compilation_result()
    766  context_id.getTestContext().getLog() << tcu::TestLog::KernelSource(code_ptr);   in compile_shader_and_get_compilation_result()
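Here code_ptr is simply the C string of the shader source: glShaderSource takes an array of string pointers plus a count, so a single source string is passed as the address of one const char* with a NULL lengths array meaning NUL-terminated strings. A stub-based sketch of that calling convention; the stub only mimics the signature and is not the real GL entry point:

```c
#include <stddef.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for glShaderSource; no GL context is created here. */
static void fake_shader_source(unsigned shader, int count,
                               const char **strings, const int *lengths)
{
    (void)shader;
    for (int i = 0; i < count; i++) {
        /* A NULL lengths array means every string is NUL-terminated. */
        size_t len = lengths ? (size_t)lengths[i] : strlen(strings[i]);
        printf("source part %d: %zu bytes\n", i, len);
    }
}

int main(void)
{
    const char *code_ptr = "void main() { }";
    /* One NUL-terminated string: pass its address as a one-element array. */
    fake_shader_source(1 /* shader id */, 1 /* count */, &code_ptr, NULL);
    return 0;
}
```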
|