/external/executorch/backends/vulkan/runtime/graph/ops/impl/Softmax.cpp
     37: bool log_softmax) {  [in add_softmax_node(), argument]
     63: if (log_softmax) {  [in add_softmax_node()]
    110: graph, args[0], args[1], args[3], /* log_softmax = */ false);  [in softmax()]
    113: void log_softmax(ComputeGraph& graph, const std::vector<ValueRef>& args) {  [in log_softmax(), function]
    116: graph, args[0], args[1], args[3], /* log_softmax = */ true);  [in log_softmax()]
    121: VK_REGISTER_OP(aten._log_softmax.default, log_softmax);
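The last hit registers the kernel under aten._log_softmax.default. For reference, that ATen op is callable directly from Python; a minimal sketch of the equivalence the binding is expected to preserve (CPU reference path, not the Vulkan kernel):

    import torch

    x = torch.randn(2, 8)
    # aten::_log_softmax(Tensor self, int dim, bool half_to_float)
    out = torch.ops.aten._log_softmax(x, 1, False)
    assert torch.allclose(out, torch.log_softmax(x, dim=1))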
|
/external/executorch/backends/vulkan/runtime/graph/ops/impl/Softmax.h
     24: bool log_softmax);
|
/external/ComputeLibrary/src/core/CL/cl_kernels/common/softmax_layer.cl
     78: #if defined(LOG_SOFTMAX)
     81: #else // defined(LOG_SOFTMAX)
     83: #endif // defined(LOG_SOFTMAX)
    189: #ifdef LOG_SOFTMAX
    194: #else /* LOG_SOFTMAX */
    199: #endif /* LOG_SOFTMAX */
    211: #ifdef LOG_SOFTMAX
    215: #else /* LOG_SOFTMAX */
    219: #endif /* LOG_SOFTMAX */
    407: #ifdef LOG_SOFTMAX
    [all …]
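The LOG_SOFTMAX guards compile one kernel source into either a softmax or a log-softmax variant (the guarded bodies are elided above). A Python sketch of the split such guards typically select, assuming the usual max-shifted formulation:

    import numpy as np

    def softmax_or_log(x, log_softmax=False):
        shifted = x - x.max(axis=-1, keepdims=True)   # max-shift for stability
        sum_exp = np.exp(shifted).sum(axis=-1, keepdims=True)
        if log_softmax:
            return shifted - np.log(sum_exp)          # LOG_SOFTMAX branch
        return np.exp(shifted) / sum_exp              # plain softmax branch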
|
/external/tensorflow/tensorflow/lite/testing/op_tests/log_softmax.py
     15: """Test configs for log_softmax."""
     24: """Make a set of tests to do log_softmax."""
     32: """Build the log_softmax op testing graph."""
     38: out = tf.nn.log_softmax(input_tensor)
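The harness above generates such graphs automatically. A self-contained sketch of the same op going through the TFLite converter (the converter settings here are assumptions, not the harness's exact configuration):

    import tensorflow as tf

    @tf.function(input_signature=[tf.TensorSpec([1, 4], tf.float32)])
    def model(x):
        return tf.nn.log_softmax(x)

    converter = tf.lite.TFLiteConverter.from_concrete_functions(
        [model.get_concrete_function()])
    tflite_model = converter.convert()  # should lower to the LOG_SOFTMAX builtin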
|
/external/executorch/backends/arm/_passes/decompose_softmaxes_pass.py
     14: torch_softmax = (torch.ops.aten.softmax.int, torch.ops.aten.log_softmax.int)
     22: log_softmax = (torch.ops.aten.log_softmax.int, exir_ops.edge.aten._log_softmax.default)  [variable]
     74: if op in log_softmax:
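The pass groups both log_softmax overloads so it can rewrite them into primitive ops. The standard identity such a decomposition rests on, written out (not the pass's literal output):

    import torch

    x, dim = torch.randn(3, 5), 1
    decomposed = x - torch.logsumexp(x, dim, keepdim=True)
    assert torch.allclose(decomposed, torch.log_softmax(x, dim))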
|
/external/pytorch/torch/csrc/jit/tensorexpr/operators/softmax.cpp
     11: bool log_softmax) {  [in computeSoftmax(), argument]
     27: // log_softmax(vi) = log(softmax(vi))  [in computeSoftmax()]
     31: // log_softmax(vi) = vi - max(vi) - log(sum(exp(vi - max(vi))))  [in computeSoftmax()]
     39: // - Final loop computes the log_softmax for every element in v.  [in computeSoftmax()]
    129: if (!log_softmax) {  [in computeSoftmax()]
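The comments above spell out the numerically stable formulation the lowering emits. The same identity as a reference sketch (not the TensorExpr lowering itself):

    import numpy as np

    def log_softmax_ref(v, axis=-1):
        # log_softmax(v) = v - max(v) - log(sum(exp(v - max(v))))
        shifted = v - v.max(axis=axis, keepdims=True)
        return shifted - np.log(np.exp(shifted).sum(axis=axis, keepdims=True))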
|
/external/pytorch/torch/csrc/jit/tensorexpr/operators/softmax.h
     13: bool log_softmax);
|
/external/pytorch/torch/masked/__init__.py
     15: log_softmax,
     42: "log_softmax",
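torch.masked re-exports a masked variant of log_softmax (see also masked.log_softmax in public_api below). A sketch of its usual semantics, assuming masked-out positions act like -inf; illustrative only, not the torch.masked signature:

    import torch

    def masked_log_softmax_sketch(x, mask, dim):
        # Positions where mask is False receive -inf, hence zero probability.
        return torch.log_softmax(x.masked_fill(~mask, float("-inf")), dim)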
|
/external/executorch/backends/arm/test/ops/test_logsoftmax.py
     52: .check(["torch.ops.aten.log_softmax.int"])
     73: .check_not(["torch.ops.aten.log_softmax.int"])
     97: .check_not(["torch.ops.aten.log_softmax.int"])
|
/external/pytorch/torch/csrc/api/src/nn/modules/adaptive.cpp
    137: const Tensor cluster_logprob = F::log_softmax(cluster_output, 1);  [in forward()]
    159: const Tensor head_logprob = F::log_softmax(head_output, 1);  [in forward()]
    174: const Tensor head_logprob = F::log_softmax(head_output, 1);  [in _get_full_log_prob()]
    184: const Tensor cluster_logprob = F::log_softmax(cluster_output, 1);  [in _get_full_log_prob()]
|
/external/executorch/kernels/portable/cpu/op_log_softmax.cpp
     53: // calculate max in log_softmax dim. During log_softmax  [in log_softmax_out()]
|
/external/pytorch/torch/_refs/special/__init__.py
     37: "log_softmax",
    206: def log_softmax(  [function]
    211: return torch.log_softmax(a=a, dim=dim, dtype=dtype)  # type: ignore[call-overload]
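The reference implementation delegates straight to torch.log_softmax, which is why the special-namespace alias and the top-level function agree; a quick check:

    import torch

    x = torch.randn(4, 4)
    assert torch.allclose(torch.special.log_softmax(x, dim=-1),
                          torch.log_softmax(x, dim=-1))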
|
/external/pytorch/aten/src/ATen/native/metal/ops/MetalSoftmax.mm
     21: // TODO: [T87180544] Implement softmax/log_softmax in metal shaders
     66: m.impl(TORCH_SELECTIVE_NAME("aten::log_softmax.int"), TORCH_FN(metal::log_softmax_int));
|
/external/executorch/exir/tests/test_op_convert.py
     64: aten.log_softmax.int: aten.log_softmax.int_out,
|
/external/pytorch/torch/nn/modules/adaptive.py
    238: cluster_logprob = F.log_softmax(cluster_output, dim=1)
    252: head_logprob = F.log_softmax(head_output, dim=1)
    264: head_logprob = F.log_softmax(head_output, dim=1)
    270: cluster_logprob = F.log_softmax(cluster_output, dim=1)
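Both the C++ and Python hits combine a head log-probability with a per-cluster log-probability: in log space, the chain rule P(w | x) = P(cluster | x) * P(w | cluster, x) becomes an addition. A schematic sketch (shapes and indices are illustrative, not the module's exact layout):

    import torch
    import torch.nn.functional as F

    head_output = torch.randn(2, 10)     # shortlist logits + one logit per tail cluster
    cluster_output = torch.randn(2, 50)  # logits within one tail cluster
    cluster_index = 8                    # that cluster's slot in the head

    head_logprob = F.log_softmax(head_output, dim=1)
    cluster_logprob = F.log_softmax(cluster_output, dim=1)
    # log P(w|x) = log P(cluster|x) + log P(w|cluster,x)
    tail_logprob = head_logprob[:, cluster_index].unsqueeze(1) + cluster_logprob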
|
/external/pytorch/functorch/op_analysis/public_api
    421: log_softmax
    607: masked.log_softmax
|
/external/tensorflow/tensorflow/dtensor/mlir/expansions/softmax_spmd_expander.cc
    159: auto log_softmax = builder.create<mlir::TF::SubOp>(  [in ComputeLogSoftmax(), local]
    161: return log_softmax.getResult();  [in ComputeLogSoftmax()]
    169: bool log_softmax) {  [in ComputeShardedSoftmax(), argument]
    177: if (log_softmax) {  [in ComputeShardedSoftmax()]
    574: // softmax is 1 and log_softmax is 0.  [in ExpandOp()]
    579: const mlir::Value log_softmax =  [in ExpandOp(), local]
    599: features_zero, log_softmax)  [in ExpandOp()]
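ComputeLogSoftmax materializes log_softmax as a SubOp, which is what makes the sharded case tractable: the max and sum-of-exp reductions can be all-reduced across shards, after which the subtraction is purely local. A single-process simulation of that structure (numpy stands in for the collectives; a sketch of the idea, not the expander's code):

    import numpy as np

    shards = [np.random.randn(4, 8) for _ in range(2)]  # feature dim split over 2 devices

    # Stage 1: local partial reductions, then "all-reduce".
    global_max = np.maximum.reduce([s.max(axis=1, keepdims=True) for s in shards])
    global_sum = np.add.reduce(
        [np.exp(s - global_max).sum(axis=1, keepdims=True) for s in shards])

    # Stage 2: fully local elementwise step: log_softmax = (x - max) - log(sum)
    out = [(s - global_max) - np.log(global_sum) for s in shards]

    ref = np.concatenate(shards, axis=1)
    ref = ref - ref.max(axis=1, keepdims=True)
    ref = ref - np.log(np.exp(ref).sum(axis=1, keepdims=True))
    assert np.allclose(np.concatenate(out, axis=1), ref)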
|
/external/executorch/backends/vulkan/runtime/graph/ops/glsl/softmax.yaml
     19: - NAME: log_softmax
|
/external/pytorch/test/onnx/model_defs/mnist.py
     21: return F.log_softmax(x, dim=1)
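Returning log_softmax from the forward pass pairs with NLLLoss; the two together equal cross-entropy on raw logits, which is the standard reason for this MNIST pattern:

    import torch
    import torch.nn.functional as F

    logits = torch.randn(8, 10)
    target = torch.randint(0, 10, (8,))
    loss_a = F.nll_loss(F.log_softmax(logits, dim=1), target)
    loss_b = F.cross_entropy(logits, target)
    assert torch.allclose(loss_a, loss_b)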
|
/external/tensorflow/tensorflow/python/ops/distributions/categorical.py
    317: nn_ops.log_softmax(self.logits) * self.probs, axis=-1)
    342: delta_log_probs1 = (nn_ops.log_softmax(a.logits) -
    343: nn_ops.log_softmax(b.logits))
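Both hits are textbook identities: entropy is the negative expected log-probability, and the KL divergence between two categoricals reduces to a difference of log_softmax terms. Written out with numpy as a reference sketch of the math the TF code computes:

    import numpy as np

    def log_softmax(z):
        z = z - z.max(axis=-1, keepdims=True)
        return z - np.log(np.exp(z).sum(axis=-1, keepdims=True))

    a, b = np.random.randn(5), np.random.randn(5)
    probs_a = np.exp(log_softmax(a))
    entropy = -np.sum(probs_a * log_softmax(a), axis=-1)
    kl_ab = np.sum(probs_a * (log_softmax(a) - log_softmax(b)), axis=-1)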
|
/external/pytorch/aten/src/ATen/native/SoftMax.cpp
     27: #include <ATen/ops/log_softmax.h>
    499: Tensor log_softmax(const Tensor& input_, const int64_t dim_, std::optional<ScalarType> dtype) {  [in log_softmax(), function]
    551: return at::log_softmax(input, dim, dtype);  [in special_log_softmax()]
    568: Tensor log_softmax(const Tensor& self, Dimname dim, std::optional<ScalarType> dtype) {  [in log_softmax(), function]
    569: return at::log_softmax(self, dimname_to_position(self, dim), dtype);  [in log_softmax()]
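The overloads above take either an integer dim with an optional accumulation dtype, or a Dimname that dimname_to_position translates to an index. From Python, the dtype overload looks like this:

    import torch

    x = torch.randn(3, 4, dtype=torch.float16)
    # dtype casts the input before the op runs, mirroring the optional
    # ScalarType parameter in the C++ overloads above.
    out = torch.log_softmax(x, dim=1, dtype=torch.float32)
    assert out.dtype == torch.float32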
|
/external/armnn/src/armnnTfLiteParser/test/LogSoftmax.cpp
     18: "operator_codes": [ { "builtin_code": "LOG_SOFTMAX" } ],  [in LogSoftmaxFixture()]
|
/external/pytorch/aten/src/ATen/native/vulkan/ops/Softmax.cpp
    192: Tensor log_softmax(  [in log_softmax(), function]
    211: m.impl("_log_softmax", TORCH_FN(log_softmax));  [in TORCH_LIBRARY_IMPL()]
|
/external/pytorch/docs/source/special.rst
     36: .. autofunction:: log_softmax
|
/external/pytorch/torch/csrc/api/include/torch/nn/functional/activation.h
    307: inline Tensor log_softmax(  [in log_softmax(), function]
    314: ret = input.log_softmax(dim);  [in log_softmax()]
    316: ret = input.log_softmax(dim, dtype);  [in log_softmax()]
    325: /// https://pytorch.org/docs/main/nn.functional.html#torch.nn.functional.log_softmax
    334: /// F::log_softmax(input, LogSoftmaxFuncOptions(1));
    336: inline Tensor log_softmax(  [in log_softmax(), function]
    339: return detail::log_softmax(input, options.dim(), options.dtype());  [in log_softmax()]
|