
Searched refs:vp8 (Results 1 – 25 of 162) sorted by relevance


/external/libvpx/
Android.bp:36 "libvpx/vp8/common/alloccommon.c",
37 "libvpx/vp8/common/arm/loopfilter_arm.c",
38 "libvpx/vp8/common/arm/neon/bilinearpredict_neon.c",
39 "libvpx/vp8/common/arm/neon/copymem_neon.c",
40 "libvpx/vp8/common/arm/neon/dc_only_idct_add_neon.c",
41 "libvpx/vp8/common/arm/neon/dequant_idct_neon.c",
42 "libvpx/vp8/common/arm/neon/dequantizeb_neon.c",
43 "libvpx/vp8/common/arm/neon/idct_blk_neon.c",
44 "libvpx/vp8/common/arm/neon/iwalsh_neon.c",
45 "libvpx/vp8/common/arm/neon/loopfiltersimplehorizontaledge_neon.c",
[all …]
/external/libvpx/libvpx/test/
vpx_temporal_svc_encoder.sh:96 vpx_tsvc_encoder vp8 "${output_basename}" 0 200 || return 1
105 vpx_tsvc_encoder vp8 "${output_basename}" 1 200 400 || return 1
114 vpx_tsvc_encoder vp8 "${output_basename}" 2 200 400 || return 1
123 vpx_tsvc_encoder vp8 "${output_basename}" 3 200 400 600 || return 1
132 vpx_tsvc_encoder vp8 "${output_basename}" 4 200 400 600 || return 1
141 vpx_tsvc_encoder vp8 "${output_basename}" 5 200 400 600 || return 1
150 vpx_tsvc_encoder vp8 "${output_basename}" 6 200 400 600 || return 1
159 vpx_tsvc_encoder vp8 "${output_basename}" 7 200 400 600 800 1000 || return 1
168 vpx_tsvc_encoder vp8 "${output_basename}" 8 200 400 || return 1
177 vpx_tsvc_encoder vp8 "${output_basename}" 9 200 400 600 || return 1
[all …]
vpxenc.sh:131 --codec=vp8 \
148 --codec=vp8 \
164 $(vpxenc_rt_params vp8) \
178 --codec=vp8 \
197 --codec=vp8 \
215 --codec=vp8 \
/external/webrtc/modules/rtp_rtcp/source/
video_rtp_depacketizer_vp8.cc:50 int ParseVP8Descriptor(RTPVideoHeaderVP8* vp8, in ParseVP8Descriptor() argument
57 vp8->nonReference = (*data & 0x20) ? true : false; // N bit in ParseVP8Descriptor()
58 vp8->beginningOfPartition = (*data & 0x10) ? true : false; // S bit in ParseVP8Descriptor()
59 vp8->partitionId = (*data & 0x0F); // PartID field in ParseVP8Descriptor()
85 vp8->pictureId = (*data & 0x7F); in ParseVP8Descriptor()
92 vp8->pictureId = (vp8->pictureId << 8) + *data; in ParseVP8Descriptor()
103 vp8->tl0PicIdx = *data; in ParseVP8Descriptor()
114 vp8->temporalIdx = ((*data >> 6) & 0x03); in ParseVP8Descriptor()
115 vp8->layerSync = (*data & 0x20) ? true : false; // Y bit in ParseVP8Descriptor()
118 vp8->keyIdx = *data & 0x1F; in ParseVP8Descriptor()
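
The video_rtp_depacketizer_vp8.cc hits above read the VP8 RTP payload descriptor bit by bit: N, S and PartID in the mandatory first octet, then the optional PictureID, TL0PICIDX, TID/Y and KEYIDX fields. As a minimal C sketch of just that first octet, reusing the masks visible in the snippet (the X extension-present bit at 0x80 follows RFC 7741; the struct and function names are illustrative, not the WebRTC API):

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    struct Vp8DescriptorSketch {
      bool has_extension;          /* X bit: extension octets follow   */
      bool non_reference;          /* N bit: frame may be discarded    */
      bool beginning_of_partition; /* S bit: start of a VP8 partition  */
      uint8_t partition_id;        /* PartID field                     */
    };

    /* Returns the number of bytes consumed (1) or -1 on empty input. */
    static int parse_vp8_first_octet(const uint8_t *data, size_t len,
                                     struct Vp8DescriptorSketch *out) {
      if (len == 0) return -1;
      out->has_extension          = (data[0] & 0x80) != 0;
      out->non_reference          = (data[0] & 0x20) != 0;
      out->beginning_of_partition = (data[0] & 0x10) != 0;
      out->partition_id           =  data[0] & 0x0F;
      return 1;
    }
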
/external/webrtc/rtc_base/experiments/
balanced_degradation_settings.cc:121 if (!IsValid(configs[i].vp8, configs[i - 1].vp8) || in IsValid()
130 if (!IsValidConfig(config.vp8) || !IsValidConfig(config.vp9) || in IsValid()
155 low = config.vp8.GetQpLow(); in GetThresholds()
156 high = config.vp8.GetQpHigh(); in GetThresholds()
195 fps = config->vp8.GetFps(); in GetFps()
227 kbps = config->vp8.GetKbps(); in GetKbps()
260 kbps_res = config->vp8.GetKbpsRes(); in GetKbpsRes()
318 CodecTypeSpecific vp8, in Config() argument
328 vp8(vp8), in Config()
344 [](Config* c) { return &c->vp8.qp_low; }), in BalancedDegradationSettings()
[all …]
balanced_degradation_settings_unittest.cc:371 VideoCodecType vp8 = kVideoCodecVP8; in TEST() local
377 EXPECT_TRUE(s.CanAdaptUp(vp8, 1000, 0)); // No bitrate provided. in TEST()
378 EXPECT_FALSE(s.CanAdaptUp(vp8, 1000, 79000)); in TEST()
379 EXPECT_TRUE(s.CanAdaptUp(vp8, 1000, 80000)); in TEST()
380 EXPECT_TRUE(s.CanAdaptUp(vp8, 1001, 1)); // No limit configured. in TEST()
381 EXPECT_FALSE(s.CanAdaptUp(vp8, 3000, 89000)); in TEST()
382 EXPECT_TRUE(s.CanAdaptUp(vp8, 3000, 90000)); in TEST()
383 EXPECT_TRUE(s.CanAdaptUp(vp8, 3001, 1)); // No limit. in TEST()
407 VideoCodecType vp8 = kVideoCodecVP8; in TEST() local
413 EXPECT_TRUE(s.CanAdaptUpResolution(vp8, 1000, 0)); // No bitrate provided. in TEST()
[all …]
balanced_degradation_settings.h:63 CodecTypeSpecific vp8,
71 kbps_res == o.kbps_res && fps_diff == o.fps_diff && vp8 == o.vp8 &&
104 CodecTypeSpecific vp8; member
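
The settings and unittest hits above gate quality adaptation on per-codec limits: CanAdaptUp(vp8, pixels, bitrate_bps) passes when no bitrate estimate or no limit is configured, and otherwise only once the estimate reaches the configured kbps value. A toy sketch of that comparison, inferred from the test expectations (e.g. an 80 kbps limit rejecting 79000 bps and accepting 80000 bps) rather than taken from the WebRTC sources:

    #include <stdbool.h>
    #include <stdint.h>

    /* Hypothetical helper, not the WebRTC implementation. */
    static bool can_adapt_up_sketch(uint32_t bitrate_bps, uint32_t limit_kbps) {
      if (bitrate_bps == 0) return true;        /* no bitrate provided  */
      if (limit_kbps == 0) return true;         /* no limit configured  */
      return bitrate_bps >= limit_kbps * 1000u; /* estimate reaches limit */
    }
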
/external/libvpx/libvpx/vpx/
vpx_codec.mk:14 API_SRCS-$(CONFIG_VP8_ENCODER) += vp8.h
16 API_DOC_SRCS-$(CONFIG_VP8_ENCODER) += vp8.h
19 API_SRCS-$(CONFIG_VP8_DECODER) += vp8.h
21 API_DOC_SRCS-$(CONFIG_VP8_DECODER) += vp8.h
/external/XNNPACK/src/f32-raddexpminusmax/gen/
avx512f-p5-scalef-x144.c:106 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144() local
116 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
126 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
136 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
146 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
159 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x144-acc3.c:108 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3() local
118 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
128 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
138 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
148 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
161 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
avx512f-p5-scalef-x160-acc5.c:115 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5() local
126 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
148 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
159 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
173 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
avx512f-p5-scalef-x160.c:111 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160() local
122 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
133 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
144 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
155 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
169 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
avx512f-p5-scalef-x160-acc2.c:112 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2() local
123 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
134 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
156 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
170 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
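
The generated AVX-512 kernels in this group (and in the f32-raddstoreexpminusmax, f32-vscaleextexp, f32-vscaleexpminusmax and f32-raddextexp groups below) share one pattern: a degree-5 polynomial evaluated with fused multiply-adds, followed by a power-of-two scaling via _mm512_scalef_ps. A scalar sketch of one lane, with placeholder coefficients (the real minimax constants live in the XNNPACK sources and are not reproduced here):

    #include <math.h>

    /* Mirrors the vp8 = _mm512_fmadd_ps(vp8, vt8, ...) chain above;
     * ldexpf() plays the role of _mm512_scalef_ps (multiply by 2^n). */
    static float p5_scalef_lane(float t, float n, const float c[6]) {
      float p = fmaf(c[5], t, c[4]);  /* vp = fma(vc5, vt, vc4) */
      p = fmaf(p, t, c[3]);
      p = fmaf(p, t, c[2]);
      p = fmaf(p, t, c[1]);
      p = fmaf(p, t, c[0]);
      return ldexpf(p, (int)n);       /* vf = scalef(vp, vn)    */
    }
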
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
avx512f-p5-scalef-x144.c:107 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144() local
117 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
127 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
147 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
160 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x144-acc3.c:109 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3() local
119 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
129 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
139 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
149 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
162 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
avx512f-p5-scalef-x160-acc2.c:113 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2() local
124 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
135 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
146 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
157 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
171 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
avx512f-p5-scalef-x160.c:112 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160() local
123 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
134 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
156 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
170 const __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
/external/XNNPACK/src/f32-vscaleextexp/gen/
avx512f-p5-scalef-x144.c:97 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144() local
107 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
117 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
127 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
153 __m512 vf8 = _mm512_mul_ps(vp8, vscalev); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x160.c:101 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160() local
112 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
123 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
134 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
162 __m512 vf8 = _mm512_mul_ps(vp8, vscalev); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
avx512f-p5-scalef-x144.c:107 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144() local
117 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
127 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
147 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
160 __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x160.c:112 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160() local
123 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
134 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
156 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
170 __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
avx512f-p5-scalef-x176.c:117 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176() local
129 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
141 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
153 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
165 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
180 __m512 vf8 = _mm512_scalef_ps(vp8, vn8); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
/external/autotest/server/site_tests/native_Benchmarks/
native_Benchmarks.py:15 from vp8 import vp8
20 'vp8': vp8,
/external/XNNPACK/src/f32-raddextexp/gen/
avx512f-p5-scalef-x144.c:97 __m512 vp8 = _mm512_fmadd_ps(vc5, vt8, vc4); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144() local
107 vp8 = _mm512_fmadd_ps(vp8, vt8, vc3); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
117 vp8 = _mm512_fmadd_ps(vp8, vt8, vc2); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
127 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc0); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
180 vaccv0 = _mm512_add_ps(vaccv0, _mm512_scalef_ps(vp8, vdelta_e8)); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
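
Unlike the other exp kernels, f32-raddextexp accumulates in an extended-exponent form: the final vaccv0 update above adds scalef(vp8, vdelta_e8), i.e. each term is rescaled by the gap between its exponent and a running maximum before being summed, which keeps the reduction from overflowing. A scalar sketch of that idea (variable names and the rescaling policy are assumptions, not a transcription of the kernel):

    #include <math.h>

    /* Folds one term p_i * 2^{e_i} into a running sum stored as
     * (acc, emax), where the true sum is acc * 2^{emax}. */
    static void radd_extexp_step(float p_i, float e_i,
                                 float *acc, float *emax) {
      if (e_i > *emax) {                        /* new max: rescale the acc */
        *acc = ldexpf(*acc, (int)(*emax - e_i));
        *emax = e_i;
      }
      *acc += ldexpf(p_i, (int)(e_i - *emax));  /* scalef(p, delta_e)       */
    }
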
/external/XNNPACK/src/f32-velu/gen/
velu-avx2-rr1-p6-x72.c:111 __m256 vp8 = _mm256_fmadd_ps(vc6, vt8, vc5); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72() local
121 vp8 = _mm256_fmadd_ps(vp8, vt8, vc4); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
131 vp8 = _mm256_fmadd_ps(vp8, vt8, vc3); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
141 vp8 = _mm256_fmadd_ps(vp8, vt8, vc2); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
159 vp8 = _mm256_mul_ps(vp8, vt8); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
179 vp8 = _mm256_fmadd_ps(vp8, vt8, vt8); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
197 const __m256 ve8 = _mm256_fmadd_ps(vp8, valpha, vs8); in xnn_f32_velu_ukernel__avx2_rr1_p6_x72()
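
The velu hit assembles the negative branch of ELU from a degree-6 FMA polynomial (an expm1-style approximation on the reduced argument) and a final fused multiply-add with alpha. Functionally, per element, that branch computes the following; expm1f() here stands in for the kernel's polynomial (a sketch, not the kernel's arithmetic):

    #include <math.h>

    /* ELU, element-wise: identity for x > 0, alpha*(e^x - 1) otherwise. */
    static float elu_sketch(float x, float alpha) {
      return x > 0.0f ? x : alpha * expm1f(x);
    }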
