Home
last modified time | relevance | path

Searched refs:as_float_unchecked (Results 1 – 10 of 10) sorted by relevance

/external/pytorch/c10/core/
DSymFloat.cpp18 return base->wrap_float(as_float_unchecked()); in wrap_node()
33 a = common->wrap_float(a_.as_float_unchecked()); in normalize_symfloats()
36 b = common->wrap_float(b_.as_float_unchecked()); in normalize_symfloats()
140 os << s.as_float_unchecked(); in operator<<()
DSymFloat.h102 double as_float_unchecked() const { in as_float_unchecked() function
DScalar.h370 v.d = sd.as_float_unchecked(); in Scalar()
/external/pytorch/aten/src/ATen/native/transformers/cuda/
Dattention_backward.cu80 const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _flash_attention_backward()
219 const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _scaled_dot_product_cudnn_attention_backward_cuda()
422 const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _efficient_attention_backward()
541 p.scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _efficient_attention_backward()
Dattention.cu766 const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _scaled_dot_product_cudnn_attention_cuda()
862 sdp::calculate_scale(query, scale).as_float_unchecked(); in _flash_attention_forward()
1103 const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _efficient_attention_forward()
1225 p.scale = sdp::calculate_scale(query, scale).as_float_unchecked(); in _efficient_attention_forward()
/external/pytorch/torch/csrc/utils/
Dpybind.cpp91 return py::cast(si.as_float_unchecked()).release(); in cast()
/external/pytorch/aten/src/ATen/native/mps/operations/
DAttention.mm58 auto scale_factor = sdp::calculate_scale(query, scale).as_float_unchecked();
/external/pytorch/aten/src/ATen/native/cpu/
DFlashAttentionKernel.cpp198 sdp::calculate_scale(query, scale).as_float_unchecked(); in cpu_flash_attention()
438 sdp::calculate_scale(query, scale).as_float_unchecked(); in cpu_flash_attention_backward()
/external/pytorch/aten/src/ATen/native/transformers/
Dattention.cpp681 …d, value_padded, dropout_p, is_causal, false /*return_debug_mask*/, og_scale.as_float_unchecked()); in scaled_dot_product_attention()
/external/pytorch/aten/src/ATen/core/
Divalue.h611 payload.u.as_double = i.as_float_unchecked(); in IValue()