/external/llvm/test/Transforms/MergeFunc/ |
D | 2011-02-08-RemoveEqual.ll |
    7 %"struct.kc::impl_Ccode_option" = type { %"struct.kc::impl_abstract_phylum" }
    8 %"struct.kc::impl_CexpressionDQ" = type { %"struct.kc::impl_Ccode_option", %"struct.kc::impl_Ccode_…
    9 …ruct.kc::impl_Ctext" = type { %"struct.kc::impl_Ccode_option", i32, %"struct.kc::impl_casestring__…
    10 %"struct.kc::impl_Ctext_elem" = type { %"struct.kc::impl_abstract_phylum", i32, %"struct.kc::impl_c…
    11 …uct.kc::impl_ID" = type { %"struct.kc::impl_abstract_phylum", %"struct.kc::impl_Ccode_option"*, %"…
    12 %"struct.kc::impl_abstract_phylum" = type { i32 (...)** }
    13 …"struct.kc::impl_ac_abstract_declarator_AcAbsdeclDirdecl" = type { %"struct.kc::impl_Ccode_option"…
    14 %"struct.kc::impl_casestring__Str" = type { %"struct.kc::impl_abstract_phylum", i8* }
    15 %"struct.kc::impl_elem_patternrepresentation" = type { %"struct.kc::impl_abstract_phylum", i32, %"s…
    16 %"struct.kc::impl_fileline" = type { %"struct.kc::impl_abstract_phylum", %"struct.kc::impl_casestri…
    [all …]
|
/external/libxkbcommon/test/ |
D | keymap.c |
    39 xkb_keycode_t kc;  in test_garbage_key() local
    51 kc = xkb_keymap_key_by_name(keymap, "TLDE");  in test_garbage_key()
    52 assert(kc != XKB_KEYCODE_INVALID);  in test_garbage_key()
    53 nlevels = xkb_keymap_num_levels_for_key(keymap, kc, first_layout);  in test_garbage_key()
    55 nsyms = xkb_keymap_key_get_syms_by_level(keymap, kc, first_layout, 0, &syms);  in test_garbage_key()
    58 nsyms = xkb_keymap_key_get_syms_by_level(keymap, kc, first_layout, 1, &syms);  in test_garbage_key()
    63 kc = xkb_keymap_key_by_name(keymap, "AE13");  in test_garbage_key()
    64 assert(kc != XKB_KEYCODE_INVALID);  in test_garbage_key()
    65 nlevels = xkb_keymap_num_levels_for_key(keymap, kc, first_layout);  in test_garbage_key()
    67 nsyms = xkb_keymap_key_get_syms_by_level(keymap, kc, first_layout, 0, &syms);  in test_garbage_key()
    [all …]
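The hits above exercise libxkbcommon's public key-lookup API: resolve a keycode from its XKB key name, then query levels and keysyms for it. A minimal sketch of that usage, assuming a keymap compiled from the default RMLVO names rather than the test's own keymap file, with layout index 0 standing in for first_layout and error handling reduced to asserts:

#include <assert.h>
#include <stdio.h>
#include <xkbcommon/xkbcommon.h>

int main(void) {
    struct xkb_context *ctx = xkb_context_new(XKB_CONTEXT_NO_FLAGS);
    /* NULL names: compile a keymap from the library/environment defaults. */
    struct xkb_keymap *keymap =
        xkb_keymap_new_from_names(ctx, NULL, XKB_KEYMAP_COMPILE_NO_FLAGS);
    assert(keymap != NULL);

    /* "TLDE" is the conventional XKB name of the grave/tilde key. */
    xkb_keycode_t kc = xkb_keymap_key_by_name(keymap, "TLDE");
    assert(kc != XKB_KEYCODE_INVALID);

    xkb_level_index_t nlevels = xkb_keymap_num_levels_for_key(keymap, kc, 0);
    const xkb_keysym_t *syms;
    int nsyms = xkb_keymap_key_get_syms_by_level(keymap, kc, 0, 0, &syms);
    printf("levels=%u, keysyms at level 0: %d\n", nlevels, nsyms);

    xkb_keymap_unref(keymap);
    xkb_context_unref(ctx);
    return 0;
}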
|
/external/llvm/test/CodeGen/ARM/ |
D | 2008-02-29-RegAllocLocal.ll |
    4 %"struct.kc::impl_Ccode_option" = type { %"struct.kc::impl_abstract_phylum" }
    5 …uct.kc::impl_ID" = type { %"struct.kc::impl_abstract_phylum", %"struct.kc::impl_Ccode_option"*, %"…
    6 %"struct.kc::impl_abstract_phylum" = type { i32 (...)** }
    7 %"struct.kc::impl_casestring__Str" = type { %"struct.kc::impl_abstract_phylum", i8* }
    9 …"struct.kc::impl_ID"* @_ZN2kc18f_typeofunpsubtermEPNS_15impl_unpsubtermEPNS_7impl_IDE(%"struct.kc:…
    11 …%tmp8 = getelementptr %"struct.kc::impl_Ccode_option", %"struct.kc::impl_Ccode_option"* %a_unpsubt…
    15 ret %"struct.kc::impl_ID"* null
    18 %tmp67 = tail call i32 null( %"struct.kc::impl_abstract_phylum"* null ) ; <i32> [#uses=0]
    19 %tmp97 = tail call i32 null( %"struct.kc::impl_abstract_phylum"* null ) ; <i32> [#uses=0]
    20 ret %"struct.kc::impl_ID"* null
|
/external/XNNPACK/src/ |
D | packing.c |
    22 size_t kc,  in xnn_pack_f32_gemm_goi_w() argument
    45 … for (size_t kr_block_start = 0; kr_block_start < round_up_po2(kc, skr); kr_block_start += kr) {  in xnn_pack_f32_gemm_goi_w()
    49 if (kc_idx < kc) {  in xnn_pack_f32_gemm_goi_w()
    50 packed_w[kr_block_offset] = k[(nr_block_start + nr_block_offset) * kc + kc_idx];  in xnn_pack_f32_gemm_goi_w()
    59 k += nc * kc;  in xnn_pack_f32_gemm_goi_w()
    69 size_t kc,  in xnn_pack_f16_gemm_goi_w() argument
    92 … for (size_t kr_block_start = 0; kr_block_start < round_up_po2(kc, skr); kr_block_start += kr) {  in xnn_pack_f16_gemm_goi_w()
    96 if (kc_idx < kc) {  in xnn_pack_f16_gemm_goi_w()
    97 packed_w[kr_block_offset] = k[(nr_block_start + nr_block_offset) * kc + kc_idx];  in xnn_pack_f16_gemm_goi_w()
    106 k += nc * kc;  in xnn_pack_f16_gemm_goi_w()
    [all …]
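For context, the packer above regroups row-major [nc][kc] weights into nr-wide, kr-deep blocks, writing one bias slot per output channel first and leaving padding where kc_idx runs past kc. A simplified sketch of that layout, assuming a single group, sr == 1 (no shuffle), and zero-filled padding; the real xnn_pack_f32_gemm_goi_w additionally handles groups and the sr interleave:

#include <stddef.h>

/* Round n up to a multiple of q, q a power of two (mirrors round_up_po2). */
static size_t round_up_po2_sketch(size_t n, size_t q) {
  return (n + q - 1) & ~(q - 1);
}

void pack_gemm_goi_w_sketch(
    size_t nc, size_t kc, size_t nr, size_t kr,
    const float *k,       /* weights, [nc][kc] row-major */
    const float *b,       /* bias, [nc], may be NULL */
    float *packed_w) {
  for (size_t nr_block_start = 0; nr_block_start < nc; nr_block_start += nr) {
    const size_t nr_block_size = nc - nr_block_start < nr ? nc - nr_block_start : nr;
    /* One bias slot per column of the block (zero for padding columns). */
    for (size_t n = 0; n < nr; n++) {
      *packed_w++ = (b != NULL && n < nr_block_size) ? b[nr_block_start + n] : 0.0f;
    }
    /* Then kr-deep slices of each column, padded up to a multiple of kr. */
    for (size_t kr_block_start = 0; kr_block_start < round_up_po2_sketch(kc, kr); kr_block_start += kr) {
      for (size_t n = 0; n < nr; n++) {
        for (size_t r = 0; r < kr; r++) {
          const size_t kc_idx = kr_block_start + r;
          *packed_w++ = (n < nr_block_size && kc_idx < kc)
              ? k[(nr_block_start + n) * kc + kc_idx]
              : 0.0f;
        }
      }
    }
  }
}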
|
/external/XNNPACK/src/xnnpack/ |
D | pack.h |
    36 size_t kc,
    49 size_t kc,
    62 size_t kc,
    75 size_t kc,
    88 size_t kc,
    101 size_t kc,
    114 size_t kc,
    127 size_t kc,
    138 size_t kc,
    149 size_t kc,
    [all …]
|
/external/libxkbcommon/src/xkbcomp/ |
D | keycodes.c |
    180 AddKeyName(KeyNamesInfo *info, xkb_keycode_t kc, xkb_atom_t name,  in AddKeyName() argument
    189 if (kc >= darray_size(info->key_names))  in AddKeyName()
    190 darray_resize0(info->key_names, kc + 1);  in AddKeyName()
    192 info->min_key_code = MIN(info->min_key_code, kc);  in AddKeyName()
    193 info->max_key_code = MAX(info->max_key_code, kc);  in AddKeyName()
    196 old_name = darray_item(info->key_names, kc);  in AddKeyName()
    206 lname, kc);  in AddKeyName()
    213 "Using %s, ignoring %s\n", kc, lname, kname);  in AddKeyName()
    220 "Using %s, ignoring %s\n", kc, kname, lname);  in AddKeyName()
    221 darray_item(info->key_names, kc) = XKB_ATOM_NONE;  in AddKeyName()
    [all …]
|
/external/libxkbcommon/src/ |
D | state.c |
    145 xkb_state_key_get_level(struct xkb_state *state, xkb_keycode_t kc,  in xkb_state_key_get_level() argument
    148 const struct xkb_key *key = XkbKey(state->keymap, kc);  in xkb_state_key_get_level()
    204 xkb_state_key_get_layout(struct xkb_state *state, xkb_keycode_t kc)  in xkb_state_key_get_layout() argument
    206 const struct xkb_key *key = XkbKey(state->keymap, kc);  in xkb_state_key_get_layout()
    742 xkb_state_update_key(struct xkb_state *state, xkb_keycode_t kc,  in xkb_state_update_key() argument
    748 const struct xkb_key *key = XkbKey(state->keymap, kc);  in xkb_state_update_key()
    847 xkb_state_key_get_syms(struct xkb_state *state, xkb_keycode_t kc,  in xkb_state_key_get_syms() argument
    853 layout = xkb_state_key_get_layout(state, kc);  in xkb_state_key_get_syms()
    857 level = xkb_state_key_get_level(state, kc, layout);  in xkb_state_key_get_syms()
    861 return xkb_keymap_key_get_syms_by_level(state->keymap, kc, layout, level,  in xkb_state_key_get_syms()
    [all …]
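The hits at lines 853-861 show how xkb_state_key_get_syms() resolves the active layout for the key, then the shift level, then delegates to the keymap-level lookup. A minimal sketch of that same resolution using only the public API (roughly equivalent for a valid keycode; consumed-modifier handling is ignored here):

#include <xkbcommon/xkbcommon.h>

int syms_for_key_sketch(struct xkb_state *state, xkb_keycode_t kc,
                        const xkb_keysym_t **syms_out) {
    struct xkb_keymap *keymap = xkb_state_get_keymap(state);
    xkb_layout_index_t layout = xkb_state_key_get_layout(state, kc);
    xkb_level_index_t level = xkb_state_key_get_level(state, kc, layout);
    /* What xkb_state_key_get_syms(state, kc, syms_out) boils down to. */
    return xkb_keymap_key_get_syms_by_level(keymap, kc, layout, level, syms_out);
}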
|
/external/XNNPACK/src/f32-gemm/gen/ |
D | 8x16-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast() argument
    33 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    34 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    95 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    167 a7 = (const float*) ((uintptr_t) a7 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    168 a6 = (const float*) ((uintptr_t) a6 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    169 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    170 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    171 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    172 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_8x16__avx512f_broadcast()
    [all …]
|
D | 4x2-wasm.c |
    19 size_t kc,  in xnn_f32_gemm_ukernel_4x2__wasm() argument
    31 assert(kc != 0);  in xnn_f32_gemm_ukernel_4x2__wasm()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_ukernel_4x2__wasm()
    69 size_t k = kc;  in xnn_f32_gemm_ukernel_4x2__wasm()
    107 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_ukernel_4x2__wasm()
    108 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_ukernel_4x2__wasm()
    109 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_ukernel_4x2__wasm()
    110 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_ukernel_4x2__wasm()
|
D | 4x2-scalar.c |
    19 size_t kc,  in xnn_f32_gemm_ukernel_4x2__scalar() argument
    31 assert(kc != 0);  in xnn_f32_gemm_ukernel_4x2__scalar()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_ukernel_4x2__scalar()
    69 size_t k = kc;  in xnn_f32_gemm_ukernel_4x2__scalar()
    107 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_ukernel_4x2__scalar()
    108 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_ukernel_4x2__scalar()
    109 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_ukernel_4x2__scalar()
    110 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_ukernel_4x2__scalar()
|
D | 6x16-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast() argument
    33 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    34 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    81 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    139 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    140 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    141 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    142 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    143 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
    144 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_minmax_ukernel_6x16__avx512f_broadcast()
|
D | 7x16-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast() argument
    33 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    34 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    88 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    153 a6 = (const float*) ((uintptr_t) a6 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    154 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    155 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    156 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    157 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    158 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_7x16__avx512f_broadcast()
    [all …]
|
D | 4x2-relu-scalar.c |
    19 size_t kc,  in xnn_f32_gemm_relu_ukernel_4x2__scalar() argument
    31 assert(kc != 0);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    69 size_t k = kc;  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    115 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    116 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    117 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
    118 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__scalar()
|
D | 4x2-relu-wasm.c |
    19 size_t kc,  in xnn_f32_gemm_relu_ukernel_4x2__wasm() argument
    31 assert(kc != 0);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    69 size_t k = kc;  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    115 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    116 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    117 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
    118 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_relu_ukernel_4x2__wasm()
|
D | 5x16-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast() argument
    33 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    34 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    74 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    125 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    126 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    127 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    128 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
    129 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_minmax_ukernel_5x16__avx512f_broadcast()
|
D | 4x16-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast() argument
    33 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    34 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    67 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    111 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    112 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    113 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
    114 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_minmax_ukernel_4x16__avx512f_broadcast()
|
D | 4x2-minmax-scalar.c |
    19 size_t kc,  in xnn_f32_gemm_minmax_ukernel_4x2__scalar() argument
    31 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    71 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    126 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    127 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    128 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
    129 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__scalar()
|
D | 4x2-minmax-wasm.c |
    19 size_t kc,  in xnn_f32_gemm_minmax_ukernel_4x2__wasm() argument
    31 assert(kc != 0);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    32 assert(kc % sizeof(float) == 0);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    71 size_t k = kc;  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    126 a3 = (const void*) ((uintptr_t) a3 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    127 a2 = (const void*) ((uintptr_t) a2 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    128 a1 = (const void*) ((uintptr_t) a1 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
    129 a0 = (const void*) ((uintptr_t) a0 - kc);  in xnn_f32_gemm_minmax_ukernel_4x2__wasm()
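All of the generated microkernels above share the same kc convention: kc is a byte count (hence the sizeof(float) assert), the inner loop consumes kc bytes from each A row, and at the end of a column tile the A pointers are rewound by kc so the same rows can be re-read for the next tile of N. A minimal scalar 1x1 illustration of that convention, not an actual XNNPACK kernel (mr/nr tiling, min/max clamping and the cm/cn/a stride parameters are left out):

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

/* w points at packed weights: for each of the nc columns, one bias value
 * followed by kc/sizeof(float) weights (nr = 1, kr = 1 packing). */
void gemm_1x1_sketch(size_t nc, size_t kc,        /* kc in bytes */
                     const float *a, const float *w, float *c) {
  assert(kc != 0);
  assert(kc % sizeof(float) == 0);
  do {
    float acc = *w++;                             /* packed bias first */
    size_t k = kc;
    do {
      acc += (*a++) * (*w++);                     /* one packed weight per k */
      k -= sizeof(float);
    } while (k != 0);
    *c++ = acc;
    a = (const float*) ((uintptr_t) a - kc);      /* rewind A for next column */
    nc -= 1;
  } while (nc != 0);
}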
|
/external/XNNPACK/src/f32-gemm/gen-inc/ |
D | 6x16inc-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast() argument
    34 assert(kc != 0);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    35 assert(kc % sizeof(float) == 0);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    83 size_t k = kc;  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    141 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    142 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    143 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    144 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    145 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
    146 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemminc_minmax_ukernel_6x16__avx512f_broadcast()
|
D | 7x16inc-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast() argument
    34 assert(kc != 0);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    35 assert(kc % sizeof(float) == 0);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    90 size_t k = kc;  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    155 a6 = (const float*) ((uintptr_t) a6 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    156 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    157 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    158 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    159 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    160 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemminc_minmax_ukernel_7x16__avx512f_broadcast()
    [all …]
|
D | 8x16inc-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast() argument
    34 assert(kc != 0);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    35 assert(kc % sizeof(float) == 0);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    97 size_t k = kc;  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    169 a7 = (const float*) ((uintptr_t) a7 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    170 a6 = (const float*) ((uintptr_t) a6 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    171 a5 = (const float*) ((uintptr_t) a5 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    172 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    173 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    174 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemminc_minmax_ukernel_8x16__avx512f_broadcast()
    [all …]
|
D | 5x16inc-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast() argument
    34 assert(kc != 0);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    35 assert(kc % sizeof(float) == 0);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    76 size_t k = kc;  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    127 a4 = (const float*) ((uintptr_t) a4 - kc);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    128 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    129 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    130 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
    131 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemminc_minmax_ukernel_5x16__avx512f_broadcast()
|
D | 4x16inc-minmax-avx512f-broadcast.c |
    21 size_t kc,  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast() argument
    34 assert(kc != 0);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    35 assert(kc % sizeof(float) == 0);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    69 size_t k = kc;  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    113 a3 = (const float*) ((uintptr_t) a3 - kc);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    114 a2 = (const float*) ((uintptr_t) a2 - kc);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    115 a1 = (const float*) ((uintptr_t) a1 - kc);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
    116 a0 = (const float*) ((uintptr_t) a0 - kc);  in xnn_f32_gemminc_minmax_ukernel_4x16__avx512f_broadcast()
|
/external/XNNPACK/bench/ |
D | f32-gemm.cc |
    47 const size_t kc = state.range(2);  in GEMMBenchmark() local
    50 const size_t kc_stride = benchmark::utils::RoundUp(kc, kr * sr);  in GEMMBenchmark()
    56 std::vector<float> a(mc * kc);  in GEMMBenchmark()
    58 std::vector<float> k(nc * kc);  in GEMMBenchmark()
    71 …xnn_pack_f32_gemm_goi_w(1 /* groups */, nc, kc, nr, kr, sr, k.data(), b.data(), w.data(), 0, nullp…  in GEMMBenchmark()
    93 mb, nc, kc * sizeof(float),  in GEMMBenchmark()
    94 a.data() + m * kc, kc * sizeof(float),  in GEMMBenchmark()
    107 uint64_t(state.iterations()) * 2 * mc * nc * kc, benchmark::Counter::kIsRate);  in GEMMBenchmark()
    123 const size_t kc = state.range(2);  in PPMM1PBenchmark() local
    131 std::vector<float> a(mc * kc);  in PPMM1PBenchmark()
    [all …]
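Line 107 above derives the benchmark's throughput counter from the usual GEMM operation count: multiplying an mc x kc matrix by a kc x nc matrix performs one multiply and one add per (m, n, k) triple, i.e. 2*mc*nc*kc FLOPs per iteration. A small illustrative helper for that accounting (the helper name is made up for this sketch and is not part of the benchmark):

#include <stddef.h>
#include <stdint.h>

static double gemm_flops(uint64_t iterations, size_t mc, size_t nc, size_t kc) {
  /* 2 = one multiply + one add per accumulated element. */
  return 2.0 * (double) iterations * (double) mc * (double) nc * (double) kc;
}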
|
/external/XNNPACK/src/qc8-gemm/gen/ |
D | 1x4c2s4-minmax-fp32-wasmsimd-dot16x2-ld128.c |
    22 size_t kc,  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128() argument
    34 assert(kc != 0);  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128()
    35 assert(kc % sizeof(int8_t) == 0);  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128()
    43 kc = round_up_po2(kc, 8 * sizeof(int8_t));  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128()
    48 size_t k = kc;  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128()
    102 a0 = (const int8_t*) ((uintptr_t) a0 - kc);  in xnn_qc8_gemm_minmax_fp32_ukernel_1x4c2s4__wasmsimd_dot16x2_ld128()
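Unlike the f32 kernels above, this qc8 1x4c2s4 kernel rounds kc up before the loop (line 43): with kr = 2 and sr = 4 it consumes A in kr*sr = 8-byte groups, so the loop count and the final a0 rewind at line 102 both use the rounded value (which suggests the caller must make at least that many bytes of each A row readable). A tiny worked example of the rounding, using an inline equivalent of round_up_po2:

#include <stddef.h>
#include <stdio.h>

int main(void) {
  size_t kc = 10;                              /* logical K, in int8_t elements */
  size_t kc_rounded = (kc + 7) & ~(size_t) 7;  /* round_up_po2(kc, 8) -> 16 */
  printf("loop consumes %zu bytes of A per row\n", kc_rounded);
  return 0;
}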
|