Searched refs:input_logical_rank (Results 1 – 2 of 2) sorted by relevance

/external/pytorch/aten/src/ATen/functorch/
BatchRulesNorm.cpp
  109  const auto input_logical_rank = rankWithoutBatchDim(input, input_bdim); in batch_norm_batch_rule() local
  111  weight_ = padRight(weight_, weight_bdim, input_logical_rank); in batch_norm_batch_rule()
  510  const auto input_logical_rank = rankWithoutBatchDim(input, input_bdim); in native_layer_norm_batch_rule() local
  519  weight_ = maybePadToLogicalRank(weight_, /*has_bdim*/weight_bdim, input_logical_rank); in native_layer_norm_batch_rule()
BatchRulesLinearAlgebra.cpp
  457  auto input_logical_rank = rankWithoutBatchDim(input, input_bdim); in atol_rtol_tensor_batch_rule() local
  459  TORCH_CHECK(input_logical_rank >= 2, in atol_rtol_tensor_batch_rule()
  464  const auto input_logical_num_bdims = input_logical_rank - 2; in atol_rtol_tensor_batch_rule()
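Both groups of hits use the same vmap batching pattern: the "logical" rank is the rank the per-example program sees, i.e. the physical rank minus one when a vmap batch dim is present, and lower-rank tensors such as weight are padded with size-1 dims up to that logical rank so they broadcast against the batched input (or, as in atol_rtol_tensor_batch_rule, so rank checks apply to the logical rank rather than the physical one). The sketch below is a minimal, simplified illustration of that idea; logical_rank and pad_right_to_rank are hypothetical stand-ins, not the actual functorch helpers (rankWithoutBatchDim, padRight, maybePadToLogicalRank) shown in the hits above, and it assumes the batch dim, when present, is dim 0.

// Simplified sketch of the logical-rank pattern; not the real functorch helpers.
#include <ATen/ATen.h>
#include <iostream>
#include <optional>

// Rank as the per-example program sees it: physical rank minus one
// if the tensor carries a vmap batch dim.
static int64_t logical_rank(const at::Tensor& t,
                            std::optional<int64_t> batch_dim) {
  return batch_dim.has_value() ? t.dim() - 1 : t.dim();
}

// Append trailing size-1 dims until the tensor reaches target_rank,
// so it broadcasts against a higher-rank (logical) input.
static at::Tensor pad_right_to_rank(at::Tensor t, int64_t target_rank) {
  while (t.dim() < target_rank) {
    t = t.unsqueeze(-1);
  }
  return t;
}

int main() {
  // Physically (B=5, N=2, C=3, H=4, W=4); logically (N, C, H, W) under vmap.
  auto input = at::randn({5, 2, 3, 4, 4});
  auto weight = at::randn({3});  // per-channel weight, logical rank 1

  const auto input_logical_rank = logical_rank(input, /*batch_dim=*/0);  // 4
  // (C,) -> (C, 1, 1) so it lines up with the C, H, W dims of the
  // logical (N, C, H, W) input under right-aligned broadcasting.
  auto weight_ = pad_right_to_rank(weight, input_logical_rank - 1);
  std::cout << weight_.sizes() << "\n";  // [3, 1, 1]
}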