Searched refs:pass_size (Results 1 – 4 of 4) sorted by relevance
  93    auto pass_size = pass_vector->size();                       in ProcessMatchedNodes() local
  94    for (size_t idx = 1; idx <= pass_size - 1; ++idx) {         in ProcessMatchedNodes()
 112    if (pass_size < kOffset) {                                  in ProcessMatchedNodes()
 115    for (size_t idx = pass_size - kOffset; idx > 0; --idx) {    in ProcessMatchedNodes()
 117    if (idx == pass_size - kOffset) {                           in ProcessMatchedNodes()
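The first result shows an index-based walk over a pass vector guarded by a size check. Below is a minimal sketch of that pattern in C; the element type, the value of kOffset, and the early return are assumptions, since the search output only shows isolated lines. The property the guard protects is unsigned arithmetic: with a size_t pass_size, evaluating pass_size - kOffset before confirming pass_size >= kOffset would wrap around to a huge index.

#include <stddef.h>

/* Hypothetical reconstruction of the guarded descending loop above.
 * kOffset, the element type, and the early return are assumptions. */
enum { kOffset = 2 };

static void process_matched_nodes(const int *pass_vector, size_t pass_size)
{
    if (pass_size < kOffset)          /* must come first: avoids unsigned wrap-around */
        return;

    /* walk toward the front, mirroring the descending loop in the result */
    for (size_t idx = pass_size - kOffset; idx > 0; --idx) {
        if (idx == pass_size - kOffset) {
            /* first iteration: element closest to the tail (assumption) */
        }
        (void)pass_vector[idx];
    }
}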
  46    int pass_size;                                                                        member
 117    uint8_t *tmp = av_fast_realloc(ctx->pass_data, &ctx->pass_size,                      in get_stats()
 153    while (ret > 0 && ctx->pass_size - ctx->pass_pos > 0) {                              in set_stats()
 154    ret = rav1e_twopass_in(ctx->ctx, ctx->pass_data + ctx->pass_pos, ctx->pass_size);    in set_stats()
 230    ctx->pass_size = (strlen(avctx->stats_in) * 3) / 4;                                  in librav1e_encode_init()
 231    ctx->pass_data = av_malloc(ctx->pass_size);                                          in librav1e_encode_init()
 238    ctx->pass_size = av_base64_decode(ctx->pass_data, avctx->stats_in, ctx->pass_size);  in librav1e_encode_init()
 239    if (ctx->pass_size < 0) {                                                            in librav1e_encode_init()
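This result appears to come from an FFmpeg-style rav1e encoder wrapper handling two-pass statistics: the base64-encoded first-pass log in stats_in is sized at strlen * 3 / 4 (base64 inflates data by 4/3), decoded with av_base64_decode(), and the decoded byte count (or a negative error) is stored back into pass_size. A minimal sketch of that init path, assuming the same field meanings and reducing error handling to a negative return:

#include <stdint.h>
#include <string.h>
#include <libavutil/base64.h>
#include <libavutil/mem.h>

/* Sketch only: decode a base64 two-pass stats string into a freshly
 * allocated buffer. Names mirror the search result; the wrapper context
 * is replaced by plain output parameters. */
static int load_two_pass_stats(const char *stats_in,
                               uint8_t **pass_data, int *pass_size)
{
    /* Base64 expands data by 4/3, so strlen * 3 / 4 bounds the decoded size. */
    *pass_size = (int)((strlen(stats_in) * 3) / 4);
    *pass_data = av_malloc(*pass_size);
    if (!*pass_data)
        return -1;

    /* av_base64_decode() returns the number of bytes written,
     * or a negative value on malformed input. */
    *pass_size = av_base64_decode(*pass_data, stats_in, *pass_size);
    if (*pass_size < 0) {
        av_freep(pass_data);
        return -1;
    }
    return 0;
}

av_base64_decode() writes at most out_size bytes and returns the number of bytes actually decoded, which is why pass_size starts as an upper bound and ends up as the exact decoded length.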
 179    pool->pass_size = pool->stride / n_passes;                                  in genX()
 181    pool->snapshot_size = (pool->pass_size - data_offset) / 2;                  in genX()
 218    mi_imm(p * (uint64_t)pool->pass_size));                                     in genX()
 297    return query * (uint64_t)pool->stride + pass * (uint64_t)pool->pass_size;   in khr_perf_query_availability_offset()
 303    return query * (uint64_t)pool->stride + pass * (uint64_t)pool->pass_size +  in khr_perf_query_data_offset()
4637    uint32_t pass_size;                   member
4649    return pool->pass_size * pass + 8;    in khr_perf_query_preamble_offset()
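The last two results appear to describe one layout in a Vulkan-style performance-query pool: each query occupies stride bytes, split into n_passes slots of pass_size bytes, with per-(query, pass) offsets computed as query * stride + pass * pass_size and a fixed 8 bytes skipped at the start of each pass slot. A small sketch of that arithmetic; struct and function names mirror the matches, and the purpose of the 8-byte preamble (likely a 64-bit marker or availability word) is an assumption.

#include <stdint.h>

/* Sketch of the pool layout implied by the matches above. */
struct perf_query_pool {
    uint32_t stride;     /* bytes per query */
    uint32_t pass_size;  /* bytes per pass within one query's slot */
    uint32_t n_passes;
};

/* Byte offset of pass `pass` of query `query` inside the pool buffer. */
static uint64_t pass_offset(const struct perf_query_pool *pool,
                            uint32_t query, uint32_t pass)
{
    return query * (uint64_t)pool->stride + pass * (uint64_t)pool->pass_size;
}

/* Offset just past the per-pass preamble; the first 8 bytes of each pass
 * slot appear to be reserved (assumption based on the result at line 4649). */
static uint64_t preamble_offset(const struct perf_query_pool *pool, uint32_t pass)
{
    return (uint64_t)pool->pass_size * pass + 8;
}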