Searched refs: av_clip_intp2 (Results 1 – 19 of 19), sorted by relevance
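For context: av_clip_intp2(a, p) clips the signed integer a into the range representable by a (p+1)-bit two's-complement value, [-(1 << p), (1 << p) - 1]. The av_clip_intp2(x, 23) calls that dominate the hits below therefore saturate to the 24-bit sample range [-8388608, 8388607]. A minimal sketch of that contract (a hypothetical stand-alone helper, not the tree's optimized code):

    /* Hypothetical sketch: saturate a to the (p+1)-bit signed range
     * [-(1 << p), (1 << p) - 1], matching av_clip_intp2()'s contract. */
    static inline int clip_intp2_sketch(int a, int p)
    {
        if (a < -(1 << p))
            return -(1 << p);
        if (a > (1 << p) - 1)
            return (1 << p) - 1;
        return a;
    }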

/third_party/ffmpeg/libavcodec/
aptx.c:446   … reconstructed_sample = av_clip_intp2(reconstructed_difference + prediction->predicted_sample, 23); in aptx_prediction_filtering()
aptx.c:447   …predictor = av_clip_intp2((MUL64(prediction->s_weight[0], prediction->previous_reconstructed_sampl… in aptx_prediction_filtering()
aptx.c:459   prediction->predicted_difference = av_clip_intp2(predicted_difference >> 22, 23); in aptx_prediction_filtering()
aptx.c:460   prediction->predicted_sample = av_clip_intp2(predictor + prediction->predicted_difference, 23); in aptx_prediction_filtering()
aptxenc.c:47   *low_subband_output = av_clip_intp2(subbands[0] + subbands[1], 23); in aptx_qmf_polyphase_analysis()
aptxenc.c:48   *high_subband_output = av_clip_intp2(subbands[0] - subbands[1], 23); in aptx_qmf_polyphase_analysis()
aptxenc.c:120   …dithered_sample = rshift64_clip24(MUL64(dither, interval) + ((int64_t)av_clip_intp2(mean + d, 23) … in aptx_quantize_difference()
aptxenc.c:142   …int32_t diff = av_clip_intp2(subband_samples[subband] - channel->prediction[subband].predicted_sam… in aptx_encode_channel()
g722dec.c:115   rlow = av_clip_intp2((c->band[0].scale_factor * quantizer_table[ilow] >> 10) in g722_decode_frame()
g722dec.c:121   rhigh = av_clip_intp2(dhigh + c->band[1].s_predictor, 14); in g722_decode_frame()
motionpixels.c:241   p.v = av_clip_intp2(p.v, 5); in mp_decode_line()
motionpixels.c:243   p.u = av_clip_intp2(p.u, 5); in mp_decode_line()
motionpixels.c:272   p.v = av_clip_intp2(p.v, 5); in mp_decode_frame_helper()
motionpixels.c:274   p.u = av_clip_intp2(p.u, 5); in mp_decode_frame_helper()
dcamath.h:54   static inline int32_t clip23(int32_t a) { return av_clip_intp2(a, 23); } in clip23()
aptx.h:126   return av_clip_intp2(rshift##size(value, shift), 23); \
g722enc.c:224   decoded = av_clip_intp2((cur_node->state.scale_factor * in g722_encode_trellis()
g722enc.c:283   decoded = av_clip_intp2(dhigh + cur_node->state.s_predictor, 14); in g722_encode_trellis()
adxenc.c:84   d = av_clip_intp2(ROUNDED_DIV(d, scale), 3); in adx_encode()
adpcm.c:160   c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata ), 18); in adpcm_decode_init()
adpcm.c:161   c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 4), 18); in adpcm_decode_init()
adpcm.c:166   c->status[0].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 0), 18); in adpcm_decode_init()
adpcm.c:168   c->status[1].predictor = av_clip_intp2(AV_RL32(avctx->extradata + 8), 18); in adpcm_decode_init()
adpcm.c:435   c->predictor = av_clip_intp2(predictor, 11); in adpcm_ima_oki_expand_nibble()
adpcm.c:1918   prev = av_clip_intp2((prev + 0x20) >> 6, 21); in adpcm_decode_frame()
takdec.c:494   v = (av_clip_intp2(v >> filter_quant, 13) * (1 << dshift)) - (unsigned)*decoded; in decode_subframe()
takdec.c:656   v = av_clip_intp2(v >> 10, 13) * (1U << dshift) - *p1; in decorrelate()
g726.c:221   fa1 = av_clip_intp2((-c->a[0]*c->pk[0]*pk0)>>5, 8); in g726_decode()
hevc_mvs.c:122   scale_factor = av_clip_intp2((tb * tx + 32) >> 6, 12); in mv_scale()
h264_direct.c:58   return av_clip_intp2((tb * tx + 32) >> 6, 10); in get_scale_factor()
adpcmenc.c:242   nibble = av_clip_intp2(nibble, 3) & 0x0F; in adpcm_ms_compress_sample()
vp9dsp_template.c:1834   int f = av_clip_intp2(p1 - q1, BIT_DEPTH - 1), f1, f2; in loop_filter()
vp9dsp_template.c:1835   f = av_clip_intp2(3 * (q0 - p0) + f, BIT_DEPTH - 1); in loop_filter()
vp9dsp_template.c:1843   int f = av_clip_intp2(3 * (q0 - p0), BIT_DEPTH - 1), f1, f2; in loop_filter()
ilbcdec.c:1337   signal[i] = av_clip_intp2(tmp + 1024, 26) >> 11; in hp_output()
/third_party/ffmpeg/tests/ref/fate/
source:24   Use of av_clip() where av_clip_intp2() could be used:
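This hit is in the reference output of FFmpeg's source-style FATE check, which flags av_clip() calls whose bounds form a symmetric power-of-two range, since those can be written more compactly. Assuming a 24-bit clamp like the codec hits above, the two forms below are interchangeable (x and y are illustrative names):

    y = av_clip(x, -(1 << 23), (1 << 23) - 1); /* what the check flags */
    y = av_clip_intp2(x, 23);                  /* preferred equivalent */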
/third_party/ffmpeg/libavutil/arm/
intmath.h:65   #define av_clip_intp2 av_clip_intp2_arm   (macro definition)
/third_party/ffmpeg/libavutil/
common.h:567   #ifndef av_clip_intp2
common.h:568   # define av_clip_intp2 av_clip_intp2_c   (macro definition)
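Taken together, the last two results show the per-architecture override pattern: an arch header such as libavutil/arm/intmath.h defines av_clip_intp2 first (mapping it to av_clip_intp2_arm), and libavutil/common.h only falls back to the generic av_clip_intp2_c when the name is still unbound. A condensed sketch of that guard (the closing #endif is implied; it lies outside the search hit):

    /* libavutil/arm/intmath.h, pulled in ahead of the generic code: */
    #define av_clip_intp2 av_clip_intp2_arm

    /* libavutil/common.h, generic fallback: */
    #ifndef av_clip_intp2
    #   define av_clip_intp2 av_clip_intp2_c
    #endif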