/external/webrtc/modules/audio_processing/agc2/ |
D | clipping_predictor_unittest.cc |
    46   ClippingPredictor& predictor) { in CallAnalyze() argument
    48   predictor.Analyze(frame); in CallAnalyze()
    57   ClippingPredictor& predictor) { in AnalyzeNonZeroCrestFactorAudio() argument
    80   CallAnalyze(num_calls, frame, predictor); in AnalyzeNonZeroCrestFactorAudio()
    88   const ClippingPredictor& predictor, in CheckChannelEstimatesWithValue() argument
    92   EXPECT_THAT(predictor.EstimateClippedLevelStep( in CheckChannelEstimatesWithValue()
    103  const ClippingPredictor& predictor) { in CheckChannelEstimatesWithoutValue() argument
    106  EXPECT_EQ(predictor.EstimateClippedLevelStep(i, level, default_step, in CheckChannelEstimatesWithoutValue()
    116  ClippingPredictor& predictor) { in AnalyzeZeroCrestFactorAudio() argument
    130  CallAnalyze(num_calls, frame, predictor); in AnalyzeZeroCrestFactorAudio()
    [all …]
|
/external/libvpx/vp8/common/ |
D | mbpitch.c |
    18   x->block[r * 4 + c].predictor = x->predictor + r * 4 * 16 + c * 4; in vp8_setup_block_dptrs()
    24   x->block[16 + r * 2 + c].predictor = in vp8_setup_block_dptrs()
    25   x->predictor + 256 + r * 4 * 8 + c * 4; in vp8_setup_block_dptrs()
    31   x->block[20 + r * 2 + c].predictor = in vp8_setup_block_dptrs()
    32   x->predictor + 320 + r * 4 * 8 + c * 4; in vp8_setup_block_dptrs()
|
D | blockd.h |
    196  unsigned char *predictor; member
    210  DECLARE_ALIGNED(16, unsigned char, predictor[384]);
|
D | reconinter.c |
    62   unsigned char *pred_ptr = d->predictor; in vp8_build_inter_predictors_b()
    138  unsigned char *upred_ptr = &x->predictor[256]; in vp8_build_inter16x16_predictors_mbuv()
    139  unsigned char *vpred_ptr = &x->predictor[320]; in vp8_build_inter16x16_predictors_mbuv()
    212  build_inter_predictors2b(x, d0, d0->predictor, 8, base_pre, pre_stride); in vp8_build_inter4x4_predictors_mbuv()
    227  build_inter_predictors2b(x, d0, d0->predictor, 8, base_pre, pre_stride); in vp8_build_inter4x4_predictors_mbuv()
|
/external/arm-trusted-firmware/docs/security_advisories/ |
D | security-advisory-tfv-6.rst |
    43   predictor as early as possible on entry into the secure world, before any branch
    48   invalidate the branch predictor when entering EL3 by disabling and re-enabling
    52   branch predictor when entering EL3 by temporarily dropping into AArch32
    55   is not effective at invalidating the branch predictor on Cortex-A73/Cortex-A75.
    58   its own branch predictor invalidation during context switch by issuing an SMC
    59   (to execute firmware branch predictor invalidation), then there is a dependency
    80   at invalidating the branch predictor on Cortex-A57, the drop into Secure-EL1
    82   branch predictor. Hence this is a reasonable comparison.
    112  effective at invalidating the branch predictor on Cortex-A15. For that CPU, set
    114  branch predictor by performing an ``ICIALLU`` instruction.
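The excerpted lines above describe mitigations that invalidate the branch predictor on entry to the secure world; for Cortex-A15 the advisory points to performing an ``ICIALLU`` instruction after CPU-specific setup that the elided lines describe. As a hedged illustration only (not Trusted Firmware-A's actual workaround code, and with a made-up function name), the sketch below shows the AArch32 CP15 write that ``ICIALLU`` corresponds to:

    /* Minimal AArch32 sketch (GCC/Clang inline asm; builds only for 32-bit
     * Arm targets). ICIALLU is the CP15 c7, c5, 0 write named in the
     * advisory; the value in the source register is ignored. */
    static inline void icache_bp_invalidate_sketch(void) {
      asm volatile("mcr p15, 0, %0, c7, c5, 0" : : "r"(0) : "memory"); /* ICIALLU */
      asm volatile("isb" : : : "memory");                              /* synchronize */
    }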
|
/external/trusty/arm-trusted-firmware/docs/security_advisories/ |
D | security-advisory-tfv-6.rst |
    43   predictor as early as possible on entry into the secure world, before any branch
    48   invalidate the branch predictor when entering EL3 by disabling and re-enabling
    52   branch predictor when entering EL3 by temporarily dropping into AArch32
    55   is not effective at invalidating the branch predictor on Cortex-A73/Cortex-A75.
    58   its own branch predictor invalidation during context switch by issuing an SMC
    59   (to execute firmware branch predictor invalidation), then there is a dependency
    80   at invalidating the branch predictor on Cortex-A57, the drop into Secure-EL1
    82   branch predictor. Hence this is a reasonable comparison.
    112  effective at invalidating the branch predictor on Cortex-A15. For that CPU, set
    114  branch predictor by performing an ``ICIALLU`` instruction.
|
/external/executorch/backends/xnnpack/test/models/ |
D | emformer_rnnt.py |
    63   predictor = self.Predictor()
    65   Tester(predictor, predictor.get_example_inputs())
|
/external/cronet/stable/net/websockets/ |
D | websocket_deflate_predictor_impl_test.cc |
    19   WebSocketDeflatePredictorImpl predictor; in TEST() local
    23   Result result = predictor.Predict(frames, 0); in TEST()
|
D | websocket_deflate_stream.h | 48 std::unique_ptr<WebSocketDeflatePredictor> predictor);
|
/external/cronet/tot/net/websockets/ |
D | websocket_deflate_predictor_impl_test.cc |
    19   WebSocketDeflatePredictorImpl predictor; in TEST() local
    23   Result result = predictor.Predict(frames, 0); in TEST()
|
/external/pdfium/core/fxcodec/flate/ |
D | flatemodule.h |
    30   int predictor,
    39   int predictor,
|
D | flatemodule.cpp |
    538  static PredictorType GetPredictor(int predictor) { in GetPredictor() argument
    539  if (predictor >= 10) in GetPredictor()
    541  if (predictor == 2) in GetPredictor()
    611  PredictorType predictor,
    643  PredictorType predictor, in FlatePredictorScanlineDecoder() argument
    648  m_Predictor(predictor) { in FlatePredictorScanlineDecoder()
    766  int predictor, in CreateDecoder() argument
    770  PredictorType predictor_type = GetPredictor(predictor); in CreateDecoder()
    785  int predictor, in FlateOrLZWDecode() argument
    792  PredictorType predictor_type = GetPredictor(predictor); in FlateOrLZWDecode()
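The GetPredictor() hits above follow the usual Flate/LZW decode-parameter convention: a Predictor value of 2 selects TIFF-style horizontal differencing, values of 10 and above select the PNG filter family, and anything else means no prediction. A minimal sketch of that mapping, using illustrative enumerator and function names rather than pdfium's actual PredictorType values:

    // Hedged sketch of the predictor-value classification seen in GetPredictor()
    // above; names are illustrative, the thresholds come from the excerpt and
    // the Flate/LZW Predictor parameter convention.
    enum class PredictorKind { kNone, kTiff, kPng };

    PredictorKind ClassifyPredictor(int predictor) {
      if (predictor >= 10)
        return PredictorKind::kPng;   // PNG filters (None/Sub/Up/Average/Paeth)
      if (predictor == 2)
        return PredictorKind::kTiff;  // TIFF horizontal differencing
      return PredictorKind::kNone;    // 1 (or any other value): no prediction
    }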
|
/external/pdfium/third_party/libtiff/ |
D | tif_predict.c |
    66   switch (sp->predictor) /* no differencing */ in PredictorSetup()
    104  sp->predictor); in PredictorSetup()
    134  if (sp->predictor == 2) in PredictorSetupDecode()
    192  else if (sp->predictor == 3) in PredictorSetupDecode()
    234  if (sp->predictor == 2) in PredictorSetupEncode()
    292  else if (sp->predictor == 3) in PredictorSetupEncode()
    939  sp->predictor = (uint16_t)va_arg(ap, uint16_vap); in PredictorVSetField()
    959  *va_arg(ap, uint16_t *) = (uint16_t)sp->predictor; in PredictorVGetField()
    975  switch (sp->predictor) in PredictorPrintDir()
    987  fprintf(fd, "%d (0x%x)\n", sp->predictor, sp->predictor); in PredictorPrintDir()
    [all …]
|
D | tif_predict.h | 44 int predictor; /* predictor tag value */ member
|
/external/libvpx/vp8/encoder/ |
D | encodeintra.c |
    54   vp8_intra4x4_predict(Above, yleft, dst_stride, b->bmi.as_mode, b->predictor, in vp8_encode_intra4x4block()
    64   vp8_short_idct4x4llm(b->dqcoeff, b->predictor, 16, dst, dst_stride); in vp8_encode_intra4x4block()
    66   vp8_dc_only_idct_add(b->dqcoeff[0], b->predictor, 16, dst, dst_stride); in vp8_encode_intra4x4block()
|
D | temporal_filter.c |
    203  DECLARE_ALIGNED(16, unsigned char, predictor[16 * 16 + 8 * 8 + 8 * 8]); in vp8_temporal_filter_iterate_c()
    271  mbd->block[0].bmi.mv.as_mv.col, predictor); in vp8_temporal_filter_iterate_c()
    275  predictor, 16, strength, filter_weight, in vp8_temporal_filter_iterate_c()
    279  predictor + 256, 8, strength, filter_weight, in vp8_temporal_filter_iterate_c()
    283  predictor + 320, 8, strength, filter_weight, in vp8_temporal_filter_iterate_c()
|
D | rdopt.c |
    481  mb->block[0].src_stride, mb->e_mbd.predictor, 16); in macro_block_yrd()
    511  static void copy_predictor(unsigned char *dst, const unsigned char *predictor) { in copy_predictor() argument
    512  const unsigned int *p = (const unsigned int *)predictor; in copy_predictor()
    553  vp8_intra4x4_predict(Above, yleft, dst_stride, mode, b->predictor, 16, in rd_pick_intra4x4block()
    576  copy_predictor(best_predictor, b->predictor); in rd_pick_intra4x4block()
    662  xd->predictor, 16); in rd_pick_intra16x16mby_mode()
    713  x->src.uv_stride, &x->e_mbd.predictor[256], in rd_inter16x16_uv()
    714  &x->e_mbd.predictor[320], 8); in rd_inter16x16_uv()
    732  x->src.uv_stride, &x->e_mbd.predictor[256], in rd_inter4x4_uv()
    733  &x->e_mbd.predictor[320], 8); in rd_inter4x4_uv()
    [all …]
|
/external/dng_sdk/source/ |
D | dng_lossless_jpeg.cpp |
    2519  int32 predictor = QuickPredict (col, in DecodeImage() local
    2526  curRowBuf [col] [curComp] = (ComponentType) (d + predictor); in DecodeImage()
    2982  int32 predictor [4]; in FreqCountSet() local
    2988  predictor [channel] = 1 << (fSrcBitDepth - 1); in FreqCountSet()
    2991  predictor [channel] = sPtr [channel - fSrcRowStep]; in FreqCountSet()
    3000  int32 pred0 = predictor [0]; in FreqCountSet()
    3001  int32 pred1 = predictor [1]; in FreqCountSet()
    3040  int16 diff = (int16) (pixel - predictor [channel]); in FreqCountSet()
    3044  predictor [channel] = pixel; in FreqCountSet()
    3088  int32 predictor [4]; in HuffEncode() local
    [all …]
|
/external/pdfium/core/fpdfapi/parser/ |
D | fpdf_parser_decode.cpp |
    339  int predictor = 0; in CreateFlateDecoder() local
    344  predictor = pParams->GetIntegerFor("Predictor"); in CreateFlateDecoder()
    352  predictor, Colors, BitsPerComponent, in CreateFlateDecoder()
    360  int predictor = 0; in FlateOrLZWDecode() local
    366  predictor = pParams->GetIntegerFor("Predictor"); in FlateOrLZWDecode()
    375  predictor, Colors, BitsPerComponent, in FlateOrLZWDecode()
|
/external/libvpx/vp9/encoder/ |
D | vp9_temporal_filter.c |
    870   uint8_t *predictor; in vp9_temporal_filter_iterate_row_c() local
    872   DECLARE_ALIGNED(16, uint8_t, predictor[BLK_PELS * 3]); in vp9_temporal_filter_iterate_row_c()
    883   predictor = CONVERT_TO_BYTEPTR(predictor16); in vp9_temporal_filter_iterate_row_c()
    885   predictor = predictor8; in vp9_temporal_filter_iterate_row_c()
    1010  mb_uv_width, mb_uv_height, ref_mv.row, ref_mv.col, predictor, scale, in vp9_temporal_filter_iterate_row_c()
    1019  CONVERT_TO_SHORTPTR(predictor), BW, in vp9_temporal_filter_iterate_row_c()
    1022  CONVERT_TO_SHORTPTR(predictor + BLK_PELS), in vp9_temporal_filter_iterate_row_c()
    1023  CONVERT_TO_SHORTPTR(predictor + (BLK_PELS << 1)), mb_uv_width, BW, in vp9_temporal_filter_iterate_row_c()
    1031  f->y_buffer + mb_y_offset, f->y_stride, predictor, BW, in vp9_temporal_filter_iterate_row_c()
    1033  f->uv_stride, predictor + BLK_PELS, predictor + (BLK_PELS << 1), in vp9_temporal_filter_iterate_row_c()
    [all …]
|
D | vp9_tpl_model.c |
    798   YV12_BUFFER_CONFIG *ref_frame[], uint8_t *predictor, in mode_estimation() argument
    839   dst = &predictor[0]; in mode_estimation()
    896   ref_frame[rf_idx]->y_stride, CONVERT_TO_SHORTPTR(&predictor[0]), bw, in mode_estimation()
    901   xd->cur_buf->y_stride, &predictor[0], bw, xd->bd); in mode_estimation()
    907   ref_frame[rf_idx]->y_stride, &predictor[0], bw, &mv.as_mv, sf, bw, bh, in mode_estimation()
    911   xd->cur_buf->y_stride, &predictor[0], bw); in mode_estimation()
    917   ref_frame[rf_idx]->y_stride, &predictor[0], bw, in mode_estimation()
    922   xd->cur_buf->y_stride, &predictor[0], bw); in mode_estimation()
    1380  uint8_t *predictor; in mc_flow_dispenser() local
    1382  DECLARE_ALIGNED(16, uint8_t, predictor[32 * 32 * 3]); in mc_flow_dispenser()
    [all …]
|
/external/pytorch/test/mobile/custom_build/ |
D | CMakeLists.txt | 11 add_executable(Predictor predictor.cpp)
|
/external/libaom/doc/ |
D | AlgorithmDescription.md |
    144  intra modes include `SMOOTH_V`, `SMOOTH_H`, `SMOOTH` and `PAETH predictor`.
    153  In `PAETH predictor` mode, the prediction for each sample is assigned as one
    155  value closest to the Paeth predictor value, i.e., T + L -TL. The samples used in
    156  `PAETH predictor` are illustrated in below figure.
    159  intra" width="300" /> <figcaption>Figure 5: Paeth predictor</figcaption>
    221  predictor that will be described in the next subsection will be checked second,
    225  non-adjacent spatial neighbors are not used for deriving the MV predictor.
    229  In addition to spatial neighboring blocks, MV predictor can be also derived
    230  using co-located blocks of reference pictures, namely temporal MV predictor. To
    231  generate temporal MV predictor, the MVs of reference frames are first stored
    [all …]
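The `PAETH predictor` lines above state the selection rule: form the base value T + L - TL from the top (T), left (L) and top-left (TL) neighbor samples, then predict with whichever of the three neighbors is closest to that base. A minimal sketch of that rule, with an illustrative function name rather than libaom's implementation (the tie-breaking order is an assumption, following the common left-then-top convention):

    #include <cstdlib>  // std::abs

    // Hedged sketch of the Paeth selection rule described above.
    int PaethPredict(int top, int left, int top_left) {
      const int base = top + left - top_left;  // T + L - TL
      const int d_left = std::abs(base - left);
      const int d_top = std::abs(base - top);
      const int d_top_left = std::abs(base - top_left);
      if (d_left <= d_top && d_left <= d_top_left) return left;  // closest: L
      if (d_top <= d_top_left) return top;                       // closest: T
      return top_left;                                           // closest: TL
    }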
|
/external/cronet/stable/third_party/llvm-libc/src/benchmarks/ |
D | README.md |
    38   This is the preferred mode to use. The function parameters are randomized and the branch predictor …
    66   …izes. Because it exercises the same size over and over again the branch predictor can kick in. It …
|
/external/cronet/tot/third_party/llvm-libc/src/benchmarks/ |
D | README.md |
    38   This is the preferred mode to use. The function parameters are randomized and the branch predictor …
    66   …izes. Because it exercises the same size over and over again the branch predictor can kick in. It …
|