Searched refs:as_float_unchecked (Results 1 – 10 of 10) sorted by relevance
/external/pytorch/c10/core/

SymFloat.cpp
     18  return base->wrap_float(as_float_unchecked());    in wrap_node()
     33  a = common->wrap_float(a_.as_float_unchecked());  in normalize_symfloats()
     36  b = common->wrap_float(b_.as_float_unchecked());  in normalize_symfloats()
    140  os << s.as_float_unchecked();                     in operator<<()

SymFloat.h
    102  double as_float_unchecked() const {               in as_float_unchecked() [function]

Scalar.h
    370  v.d = sd.as_float_unchecked();                    in Scalar()
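These c10/core hits cover both the definition (SymFloat.h:102) and the core call sites: wrapping a concrete value back into a SymNode, normalizing two SymFloats to a common node, printing, and constructing a Scalar. The "unchecked" in the name means the accessor returns the stored double without verifying that the SymFloat is non-symbolic, so the caller must establish that first. A minimal sketch of that contract (only as_float_unchecked() and is_symbolic() are taken from the hits; the data_ member and constructor are assumptions for illustration):

    // Trimmed-down stand-in for c10::SymFloat, sketching the contract
    // implied by SymFloat.h:102. Everything except as_float_unchecked()
    // and is_symbolic() is an assumption.
    #include <cassert>

    class SymFloat {
     public:
      explicit SymFloat(double d) : data_(d) {}

      // A real SymFloat is symbolic when it holds a SymNode instead of a
      // plain double; this stand-in only models the concrete case.
      bool is_symbolic() const { return false; }

      // Returns the raw double with no symbolic check ("unchecked"):
      // only meaningful when the value is known to be concrete.
      double as_float_unchecked() const { return data_; }

     private:
      double data_;
    };

    int main() {
      SymFloat s(0.125);
      assert(!s.is_symbolic());  // the caller's obligation, not the accessor's
      return s.as_float_unchecked() == 0.125 ? 0 : 1;
    }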
/external/pytorch/aten/src/ATen/native/transformers/cuda/

attention_backward.cu
     80  const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _flash_attention_backward()
    219  const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _scaled_dot_product_cudnn_attention_backward_cuda()
    422  const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _efficient_attention_backward()
    541  p.scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _efficient_attention_backward()

attention.cu
    766  const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _scaled_dot_product_cudnn_attention_cuda()
    862  sdp::calculate_scale(query, scale).as_float_unchecked();  in _flash_attention_forward()
   1103  const auto softmax_scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _efficient_attention_forward()
   1225  p.scale = sdp::calculate_scale(query, scale).as_float_unchecked();  in _efficient_attention_forward()
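All eight CUDA hits are the same idiom: sdp::calculate_scale() yields a c10::SymFloat, and the kernel launch path immediately unwraps it to a plain double, since device-side parameters cannot carry a symbolic value. A hedged sketch of the idiom follows; the body of calculate_scale (honor an explicit scale, else 1/sqrt(head_dim)) is the conventional softmax scaling and an assumption here, not quoted from the source:

    // Sketch of the recurring call-site pattern in the attention kernels.
    #include <cmath>
    #include <cstdint>
    #include <cstdio>
    #include <optional>

    struct SymFloat {  // stand-in for c10::SymFloat
      double data_;
      double as_float_unchecked() const { return data_; }
    };

    // Assumed behavior: user-supplied scale if present, else the usual
    // scaled-dot-product-attention default of 1/sqrt(head_dim).
    SymFloat calculate_scale(int64_t head_dim, std::optional<double> scale) {
      return SymFloat{scale ? *scale
                            : 1.0 / std::sqrt(static_cast<double>(head_dim))};
    }

    int main() {
      // As at attention.cu:766: unwrap right away, the kernel needs a double.
      const double softmax_scale =
          calculate_scale(64, std::nullopt).as_float_unchecked();
      std::printf("softmax_scale = %f\n", softmax_scale);  // 0.125
    }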
/external/pytorch/torch/csrc/utils/

pybind.cpp
     91  return py::cast(si.as_float_unchecked()).release();  in cast()
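The single utils hit is the Python boundary: a concrete SymFloat is unwrapped and handed to py::cast, which builds an ordinary Python float, and .release() then hands the raw handle back to the caller as pybind11 casters require. A sketch of just that step (the surrounding type_caster machinery is elided, and the simplified py::object return type is an assumption):

    // Sketch of the unwrap-and-cast step at pybind.cpp:91, outside the
    // full pybind11 type_caster. Assumes the SymFloat is concrete.
    #include <pybind11/pybind11.h>
    namespace py = pybind11;

    struct SymFloat {  // stand-in for c10::SymFloat
      double data_;
      double as_float_unchecked() const { return data_; }
    };

    py::object symfloat_to_py(const SymFloat& si) {
      // py::cast(double) produces a Python float; in the real caster the
      // handle is .release()d so the caller takes over the reference.
      return py::cast(si.as_float_unchecked());
    }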
/external/pytorch/aten/src/ATen/native/mps/operations/

Attention.mm
     58  auto scale_factor = sdp::calculate_scale(query, scale).as_float_unchecked();
/external/pytorch/aten/src/ATen/native/cpu/

FlashAttentionKernel.cpp
    198  sdp::calculate_scale(query, scale).as_float_unchecked();  in cpu_flash_attention()
    438  sdp::calculate_scale(query, scale).as_float_unchecked();  in cpu_flash_attention_backward()
/external/pytorch/aten/src/ATen/native/transformers/

attention.cpp
    681  …d, value_padded, dropout_p, is_causal, false /*return_debug_mask*/, og_scale.as_float_unchecked());  in scaled_dot_product_attention()
/external/pytorch/aten/src/ATen/core/

ivalue.h
    611  payload.u.as_double = i.as_float_unchecked();  in IValue()
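The ivalue.h hit shows the other side of the contract: IValue stores a concrete SymFloat by writing the raw double into its tagged payload union. A sketch of that branch, assuming an is_symbolic() check guards it and the symbolic case is handled separately (the payload.u.as_double shape mirrors the matched line; the rest is assumption):

    // Sketch of the IValue construction branch at ivalue.h:611. The
    // symbolic branch and all other tags are elided.
    #include <cstdint>

    struct SymFloat {  // stand-in for c10::SymFloat
      bool symbolic_ = false;
      double data_ = 0.0;
      bool is_symbolic() const { return symbolic_; }
      double as_float_unchecked() const { return data_; }
    };

    struct IValue {
      enum class Tag { Double /*, SymFloat, Int, ... */ } tag;
      union Payload {
        double as_double;
        int64_t as_int;
      };
      struct { Payload u; } payload;  // mirrors payload.u.as_double

      explicit IValue(const SymFloat& i) {
        if (!i.is_symbolic()) {
          tag = Tag::Double;
          payload.u.as_double = i.as_float_unchecked();  // the matched line
        }
        // else: retain the SymNode and tag as SymFloat (elided)
      }
    };

    int main() {
      IValue v(SymFloat{false, 3.5});
      return v.payload.u.as_double == 3.5 ? 0 : 1;
    }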