/*
 * Copyright (c) 2016, Alliance for Open Media. All rights reserved
 *
 * This source code is subject to the terms of the BSD 2 Clause License and
 * the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
 * was not distributed with this source code in the LICENSE file, you can
 * obtain it at www.aomedia.org/license/software. If the Alliance for Open
 * Media Patent License 1.0 was not distributed with this source code in the
 * PATENTS file, you can obtain it at www.aomedia.org/license/patent.
 */

#ifndef AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_
#define AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "aom_dsp/simd/v128_intrinsics_c.h"
#include "aom_dsp/simd/v64_intrinsics.h"

/* Fallback to plain, unoptimised C. */

typedef c_v128 v128;

SIMD_INLINE uint32_t v128_low_u32(v128 a) { return c_v128_low_u32(a); }
SIMD_INLINE v64 v128_low_v64(v128 a) { return c_v128_low_v64(a); }
SIMD_INLINE v64 v128_high_v64(v128 a) { return c_v128_high_v64(a); }
SIMD_INLINE v128 v128_from_64(uint64_t hi, uint64_t lo) {
  return c_v128_from_64(hi, lo);
}
SIMD_INLINE v128 v128_from_v64(v64 hi, v64 lo) {
  return c_v128_from_v64(hi, lo);
}
SIMD_INLINE v128 v128_from_32(uint32_t a, uint32_t b, uint32_t c, uint32_t d) {
  return c_v128_from_32(a, b, c, d);
}
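
/*
 * Construction sketch (illustrative only): the parameter names document lane
 * placement, e.g. v128_from_64(hi, lo) puts `hi` in the upper 64 bits and
 * `lo` in the lower 64 bits, and v128_from_v64(hi, lo) does the same with two
 * v64 halves:
 *
 *   v128 v = v128_from_64(0x0123456789abcdefULL, 0xfedcba9876543210ULL);
 */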

SIMD_INLINE v128 v128_load_unaligned(const void *p) {
  return c_v128_load_unaligned(p);
}
SIMD_INLINE v128 v128_load_aligned(const void *p) {
  return c_v128_load_aligned(p);
}

SIMD_INLINE void v128_store_unaligned(void *p, v128 a) {
  c_v128_store_unaligned(p, a);
}
SIMD_INLINE void v128_store_aligned(void *p, v128 a) {
  c_v128_store_aligned(p, a);
}
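
/*
 * Typical load/compute/store pattern, a minimal sketch assuming two 16-byte
 * source buffers and a 16-byte destination (all names are illustrative):
 *
 *   v128 a = v128_load_unaligned(src0);
 *   v128 b = v128_load_unaligned(src1);
 *   v128_store_unaligned(dst, v128_add_8(a, b));  // lane-wise 8-bit add
 */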

SIMD_INLINE v128 v128_align(v128 a, v128 b, unsigned int c) {
  return c_v128_align(a, b, c);
}

SIMD_INLINE v128 v128_zero() { return c_v128_zero(); }
SIMD_INLINE v128 v128_dup_8(uint8_t x) { return c_v128_dup_8(x); }
SIMD_INLINE v128 v128_dup_16(uint16_t x) { return c_v128_dup_16(x); }
SIMD_INLINE v128 v128_dup_32(uint32_t x) { return c_v128_dup_32(x); }
SIMD_INLINE v128 v128_dup_64(uint64_t x) { return c_v128_dup_64(x); }

typedef uint32_t sad128_internal;
SIMD_INLINE sad128_internal v128_sad_u8_init() { return c_v128_sad_u8_init(); }
SIMD_INLINE sad128_internal v128_sad_u8(sad128_internal s, v128 a, v128 b) {
  return c_v128_sad_u8(s, a, b);
}
SIMD_INLINE uint32_t v128_sad_u8_sum(sad128_internal s) {
  return c_v128_sad_u8_sum(s);
}
typedef uint32_t ssd128_internal;
SIMD_INLINE ssd128_internal v128_ssd_u8_init() { return c_v128_ssd_u8_init(); }
SIMD_INLINE ssd128_internal v128_ssd_u8(ssd128_internal s, v128 a, v128 b) {
  return c_v128_ssd_u8(s, a, b);
}
SIMD_INLINE uint32_t v128_ssd_u8_sum(ssd128_internal s) {
  return c_v128_ssd_u8_sum(s);
}
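
/*
 * The SAD/SSD helpers follow an init/accumulate/sum pattern. A minimal
 * sketch, assuming `rows` 16-byte-aligned rows per block (names are
 * illustrative):
 *
 *   sad128_internal acc = v128_sad_u8_init();
 *   for (int i = 0; i < rows; i++)
 *     acc = v128_sad_u8(acc, v128_load_aligned(a + i * a_stride),
 *                       v128_load_aligned(b + i * b_stride));
 *   uint32_t sad = v128_sad_u8_sum(acc);
 */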
SIMD_INLINE int64_t v128_dotp_su8(v128 a, v128 b) {
  return c_v128_dotp_su8(a, b);
}
SIMD_INLINE int64_t v128_dotp_s16(v128 a, v128 b) {
  return c_v128_dotp_s16(a, b);
}
SIMD_INLINE int64_t v128_dotp_s32(v128 a, v128 b) {
  return c_v128_dotp_s32(a, b);
}
SIMD_INLINE uint64_t v128_hadd_u8(v128 a) { return c_v128_hadd_u8(a); }

SIMD_INLINE v128 v128_or(v128 a, v128 b) { return c_v128_or(a, b); }
SIMD_INLINE v128 v128_xor(v128 a, v128 b) { return c_v128_xor(a, b); }
SIMD_INLINE v128 v128_and(v128 a, v128 b) { return c_v128_and(a, b); }
SIMD_INLINE v128 v128_andn(v128 a, v128 b) { return c_v128_andn(a, b); }

SIMD_INLINE v128 v128_add_8(v128 a, v128 b) { return c_v128_add_8(a, b); }
SIMD_INLINE v128 v128_add_16(v128 a, v128 b) { return c_v128_add_16(a, b); }
SIMD_INLINE v128 v128_sadd_u8(v128 a, v128 b) { return c_v128_sadd_u8(a, b); }
SIMD_INLINE v128 v128_sadd_s8(v128 a, v128 b) { return c_v128_sadd_s8(a, b); }
SIMD_INLINE v128 v128_sadd_s16(v128 a, v128 b) { return c_v128_sadd_s16(a, b); }
SIMD_INLINE v128 v128_add_32(v128 a, v128 b) { return c_v128_add_32(a, b); }
SIMD_INLINE v128 v128_add_64(v128 a, v128 b) { return c_v128_add_64(a, b); }
SIMD_INLINE v128 v128_padd_u8(v128 a) { return c_v128_padd_u8(a); }
SIMD_INLINE v128 v128_padd_s16(v128 a) { return c_v128_padd_s16(a); }
SIMD_INLINE v128 v128_sub_8(v128 a, v128 b) { return c_v128_sub_8(a, b); }
SIMD_INLINE v128 v128_ssub_u8(v128 a, v128 b) { return c_v128_ssub_u8(a, b); }
SIMD_INLINE v128 v128_ssub_s8(v128 a, v128 b) { return c_v128_ssub_s8(a, b); }
SIMD_INLINE v128 v128_sub_16(v128 a, v128 b) { return c_v128_sub_16(a, b); }
SIMD_INLINE v128 v128_ssub_s16(v128 a, v128 b) { return c_v128_ssub_s16(a, b); }
SIMD_INLINE v128 v128_ssub_u16(v128 a, v128 b) { return c_v128_ssub_u16(a, b); }
SIMD_INLINE v128 v128_sub_32(v128 a, v128 b) { return c_v128_sub_32(a, b); }
SIMD_INLINE v128 v128_sub_64(v128 a, v128 b) { return c_v128_sub_64(a, b); }
SIMD_INLINE v128 v128_abs_s16(v128 a) { return c_v128_abs_s16(a); }
SIMD_INLINE v128 v128_abs_s8(v128 a) { return c_v128_abs_s8(a); }

SIMD_INLINE v128 v128_mul_s16(v64 a, v64 b) { return c_v128_mul_s16(a, b); }
SIMD_INLINE v128 v128_mullo_s16(v128 a, v128 b) {
  return c_v128_mullo_s16(a, b);
}
SIMD_INLINE v128 v128_mulhi_s16(v128 a, v128 b) {
  return c_v128_mulhi_s16(a, b);
}
SIMD_INLINE v128 v128_mullo_s32(v128 a, v128 b) {
  return c_v128_mullo_s32(a, b);
}
SIMD_INLINE v128 v128_madd_s16(v128 a, v128 b) { return c_v128_madd_s16(a, b); }
SIMD_INLINE v128 v128_madd_us8(v128 a, v128 b) { return c_v128_madd_us8(a, b); }

SIMD_INLINE uint32_t v128_movemask_8(v128 a) { return c_v128_movemask_8(a); }
SIMD_INLINE v128 v128_blend_8(v128 a, v128 b, v128 c) {
  return c_v128_blend_8(a, b, c);
}

SIMD_INLINE v128 v128_avg_u8(v128 a, v128 b) { return c_v128_avg_u8(a, b); }
SIMD_INLINE v128 v128_rdavg_u8(v128 a, v128 b) { return c_v128_rdavg_u8(a, b); }
SIMD_INLINE v128 v128_rdavg_u16(v128 a, v128 b) {
  return c_v128_rdavg_u16(a, b);
}
SIMD_INLINE v128 v128_avg_u16(v128 a, v128 b) { return c_v128_avg_u16(a, b); }
SIMD_INLINE v128 v128_min_u8(v128 a, v128 b) { return c_v128_min_u8(a, b); }
SIMD_INLINE v128 v128_max_u8(v128 a, v128 b) { return c_v128_max_u8(a, b); }
SIMD_INLINE v128 v128_min_s8(v128 a, v128 b) { return c_v128_min_s8(a, b); }
SIMD_INLINE v128 v128_max_s8(v128 a, v128 b) { return c_v128_max_s8(a, b); }
SIMD_INLINE v128 v128_min_s16(v128 a, v128 b) { return c_v128_min_s16(a, b); }
SIMD_INLINE v128 v128_max_s16(v128 a, v128 b) { return c_v128_max_s16(a, b); }
SIMD_INLINE v128 v128_min_s32(v128 a, v128 b) { return c_v128_min_s32(a, b); }
SIMD_INLINE v128 v128_max_s32(v128 a, v128 b) { return c_v128_max_s32(a, b); }

SIMD_INLINE v128 v128_ziplo_8(v128 a, v128 b) { return c_v128_ziplo_8(a, b); }
SIMD_INLINE v128 v128_ziphi_8(v128 a, v128 b) { return c_v128_ziphi_8(a, b); }
SIMD_INLINE v128 v128_ziplo_16(v128 a, v128 b) { return c_v128_ziplo_16(a, b); }
SIMD_INLINE v128 v128_ziphi_16(v128 a, v128 b) { return c_v128_ziphi_16(a, b); }
SIMD_INLINE v128 v128_ziplo_32(v128 a, v128 b) { return c_v128_ziplo_32(a, b); }
SIMD_INLINE v128 v128_ziphi_32(v128 a, v128 b) { return c_v128_ziphi_32(a, b); }
SIMD_INLINE v128 v128_ziplo_64(v128 a, v128 b) { return c_v128_ziplo_64(a, b); }
SIMD_INLINE v128 v128_ziphi_64(v128 a, v128 b) { return c_v128_ziphi_64(a, b); }
SIMD_INLINE v128 v128_zip_8(v64 a, v64 b) { return c_v128_zip_8(a, b); }
SIMD_INLINE v128 v128_zip_16(v64 a, v64 b) { return c_v128_zip_16(a, b); }
SIMD_INLINE v128 v128_zip_32(v64 a, v64 b) { return c_v128_zip_32(a, b); }
SIMD_INLINE v128 v128_unziplo_8(v128 a, v128 b) {
  return c_v128_unziplo_8(a, b);
}
SIMD_INLINE v128 v128_unziphi_8(v128 a, v128 b) {
  return c_v128_unziphi_8(a, b);
}
SIMD_INLINE v128 v128_unziplo_16(v128 a, v128 b) {
  return c_v128_unziplo_16(a, b);
}
SIMD_INLINE v128 v128_unziphi_16(v128 a, v128 b) {
  return c_v128_unziphi_16(a, b);
}
SIMD_INLINE v128 v128_unziplo_32(v128 a, v128 b) {
  return c_v128_unziplo_32(a, b);
}
SIMD_INLINE v128 v128_unziphi_32(v128 a, v128 b) {
  return c_v128_unziphi_32(a, b);
}
SIMD_INLINE v128 v128_unpack_u8_s16(v64 a) { return c_v128_unpack_u8_s16(a); }
SIMD_INLINE v128 v128_unpacklo_u8_s16(v128 a) {
  return c_v128_unpacklo_u8_s16(a);
}
SIMD_INLINE v128 v128_unpackhi_u8_s16(v128 a) {
  return c_v128_unpackhi_u8_s16(a);
}
SIMD_INLINE v128 v128_unpack_s8_s16(v64 a) { return c_v128_unpack_s8_s16(a); }
SIMD_INLINE v128 v128_unpacklo_s8_s16(v128 a) {
  return c_v128_unpacklo_s8_s16(a);
}
SIMD_INLINE v128 v128_unpackhi_s8_s16(v128 a) {
  return c_v128_unpackhi_s8_s16(a);
}
SIMD_INLINE v128 v128_pack_s32_s16(v128 a, v128 b) {
  return c_v128_pack_s32_s16(a, b);
}
SIMD_INLINE v128 v128_pack_s32_u16(v128 a, v128 b) {
  return c_v128_pack_s32_u16(a, b);
}
SIMD_INLINE v128 v128_pack_s16_u8(v128 a, v128 b) {
  return c_v128_pack_s16_u8(a, b);
}
SIMD_INLINE v128 v128_pack_s16_s8(v128 a, v128 b) {
  return c_v128_pack_s16_s8(a, b);
}
SIMD_INLINE v128 v128_unpack_u16_s32(v64 a) { return c_v128_unpack_u16_s32(a); }
SIMD_INLINE v128 v128_unpack_s16_s32(v64 a) { return c_v128_unpack_s16_s32(a); }
SIMD_INLINE v128 v128_unpacklo_u16_s32(v128 a) {
  return c_v128_unpacklo_u16_s32(a);
}
SIMD_INLINE v128 v128_unpacklo_s16_s32(v128 a) {
  return c_v128_unpacklo_s16_s32(a);
}
SIMD_INLINE v128 v128_unpackhi_u16_s32(v128 a) {
  return c_v128_unpackhi_u16_s32(a);
}
SIMD_INLINE v128 v128_unpackhi_s16_s32(v128 a) {
  return c_v128_unpackhi_s16_s32(a);
}
SIMD_INLINE v128 v128_shuffle_8(v128 a, v128 pattern) {
  return c_v128_shuffle_8(a, pattern);
}

SIMD_INLINE v128 v128_cmpgt_s8(v128 a, v128 b) { return c_v128_cmpgt_s8(a, b); }
SIMD_INLINE v128 v128_cmplt_s8(v128 a, v128 b) { return c_v128_cmplt_s8(a, b); }
SIMD_INLINE v128 v128_cmpeq_8(v128 a, v128 b) { return c_v128_cmpeq_8(a, b); }
SIMD_INLINE v128 v128_cmpgt_s16(v128 a, v128 b) {
  return c_v128_cmpgt_s16(a, b);
}
SIMD_INLINE v128 v128_cmplt_s16(v128 a, v128 b) {
  return c_v128_cmplt_s16(a, b);
}
SIMD_INLINE v128 v128_cmpeq_16(v128 a, v128 b) { return c_v128_cmpeq_16(a, b); }

SIMD_INLINE v128 v128_cmpgt_s32(v128 a, v128 b) {
  return c_v128_cmpgt_s32(a, b);
}
SIMD_INLINE v128 v128_cmplt_s32(v128 a, v128 b) {
  return c_v128_cmplt_s32(a, b);
}
SIMD_INLINE v128 v128_cmpeq_32(v128 a, v128 b) { return c_v128_cmpeq_32(a, b); }

SIMD_INLINE v128 v128_shl_8(v128 a, unsigned int c) {
  return c_v128_shl_8(a, c);
}
SIMD_INLINE v128 v128_shr_u8(v128 a, unsigned int c) {
  return c_v128_shr_u8(a, c);
}
SIMD_INLINE v128 v128_shr_s8(v128 a, unsigned int c) {
  return c_v128_shr_s8(a, c);
}
SIMD_INLINE v128 v128_shl_16(v128 a, unsigned int c) {
  return c_v128_shl_16(a, c);
}
SIMD_INLINE v128 v128_shr_u16(v128 a, unsigned int c) {
  return c_v128_shr_u16(a, c);
}
SIMD_INLINE v128 v128_shr_s16(v128 a, unsigned int c) {
  return c_v128_shr_s16(a, c);
}
SIMD_INLINE v128 v128_shl_32(v128 a, unsigned int c) {
  return c_v128_shl_32(a, c);
}
SIMD_INLINE v128 v128_shr_u32(v128 a, unsigned int c) {
  return c_v128_shr_u32(a, c);
}
SIMD_INLINE v128 v128_shr_s32(v128 a, unsigned int c) {
  return c_v128_shr_s32(a, c);
}
SIMD_INLINE v128 v128_shl_64(v128 a, unsigned int c) {
  return c_v128_shl_64(a, c);
}
SIMD_INLINE v128 v128_shr_u64(v128 a, unsigned int c) {
  return c_v128_shr_u64(a, c);
}
SIMD_INLINE v128 v128_shr_s64(v128 a, unsigned int c) {
  return c_v128_shr_s64(a, c);
}

SIMD_INLINE v128 v128_shr_n_byte(v128 a, unsigned int n) {
  return c_v128_shr_n_byte(a, n);
}
SIMD_INLINE v128 v128_shl_n_byte(v128 a, unsigned int n) {
  return c_v128_shl_n_byte(a, n);
}
SIMD_INLINE v128 v128_shl_n_8(v128 a, unsigned int n) {
  return c_v128_shl_n_8(a, n);
}
SIMD_INLINE v128 v128_shl_n_16(v128 a, unsigned int n) {
  return c_v128_shl_n_16(a, n);
}
SIMD_INLINE v128 v128_shl_n_32(v128 a, unsigned int n) {
  return c_v128_shl_n_32(a, n);
}
SIMD_INLINE v128 v128_shl_n_64(v128 a, unsigned int n) {
  return c_v128_shl_n_64(a, n);
}
SIMD_INLINE v128 v128_shr_n_u8(v128 a, unsigned int n) {
  return c_v128_shr_n_u8(a, n);
}
SIMD_INLINE v128 v128_shr_n_u16(v128 a, unsigned int n) {
  return c_v128_shr_n_u16(a, n);
}
SIMD_INLINE v128 v128_shr_n_u32(v128 a, unsigned int n) {
  return c_v128_shr_n_u32(a, n);
}
SIMD_INLINE v128 v128_shr_n_u64(v128 a, unsigned int n) {
  return c_v128_shr_n_u64(a, n);
}
SIMD_INLINE v128 v128_shr_n_s8(v128 a, unsigned int n) {
  return c_v128_shr_n_s8(a, n);
}
SIMD_INLINE v128 v128_shr_n_s16(v128 a, unsigned int n) {
  return c_v128_shr_n_s16(a, n);
}
SIMD_INLINE v128 v128_shr_n_s32(v128 a, unsigned int n) {
  return c_v128_shr_n_s32(a, n);
}
SIMD_INLINE v128 v128_shr_n_s64(v128 a, unsigned int n) {
  return c_v128_shr_n_s64(a, n);
}
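
/*
 * The *_n_* variants mirror the run-time shifts above but are meant to be
 * called with a compile-time constant shift amount (optimised back-ends
 * typically map them to immediate-form instructions). Illustrative use:
 *
 *   v128 x = v128_dup_16(0x1234);
 *   v128 y = v128_shl_n_16(x, 4);  // shift each 16-bit lane left by 4
 */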

typedef uint32_t sad128_internal_u16;
SIMD_INLINE sad128_internal_u16 v128_sad_u16_init() {
  return c_v128_sad_u16_init();
}
SIMD_INLINE sad128_internal_u16 v128_sad_u16(sad128_internal_u16 s, v128 a,
                                             v128 b) {
  return c_v128_sad_u16(s, a, b);
}
SIMD_INLINE uint32_t v128_sad_u16_sum(sad128_internal_u16 s) {
  return c_v128_sad_u16_sum(s);
}

typedef uint64_t ssd128_internal_s16;
SIMD_INLINE ssd128_internal_s16 v128_ssd_s16_init() {
  return c_v128_ssd_s16_init();
}
SIMD_INLINE ssd128_internal_s16 v128_ssd_s16(ssd128_internal_s16 s, v128 a,
                                             v128 b) {
  return c_v128_ssd_s16(s, a, b);
}
SIMD_INLINE uint64_t v128_ssd_s16_sum(ssd128_internal_s16 s) {
  return c_v128_ssd_s16_sum(s);
}

#endif  // AOM_AOM_DSP_SIMD_V128_INTRINSICS_H_