
Searched refs:softmax (Results 1 – 25 of 127) sorted by relevance


/external/tensorflow/tensorflow/core/kernels/
softmax_op_functor.h
35 typename TTypes<T>::Matrix softmax, const bool log);
45 typename TTypes<T>::Matrix softmax, const bool log) { in Compute()
73 softmax.device(d) = shifted_logits; in Compute()
75 softmax.device(d) = (softmax - softmax.exp() in Compute()
86 softmax.device(d) = shifted_logits.exp(); in Compute()
88 softmax.device(d) = (softmax * softmax.sum(along_class) in Compute()
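
The softmax_op_functor.h hits above show the shift-by-max pattern: logits are shifted by their row maximum before exponentiating, then either normalized (softmax) or log-normalized (log softmax). A minimal NumPy sketch of that computation, not the Eigen implementation itself:

import numpy as np

def softmax_2d(logits, log=False):
    # Row-wise (log-)softmax over a [batch, classes] matrix.
    shifted = logits - logits.max(axis=1, keepdims=True)   # shifted_logits
    if log:
        # log branch: shifted - log(sum(exp(shifted)) along the class axis)
        return shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    exp = np.exp(shifted)                                   # shifted_logits.exp()
    return exp / exp.sum(axis=1, keepdims=True)             # normalize along_class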
softmax_op.cc
42 typename TTypes<T>::Matrix softmax, const bool log) { in operator ()()
43 SoftmaxEigenImpl<Device, T>::Compute(d, logits, softmax, log); in operator ()()
/external/libtextclassifier/native/lang_id/common/math/
softmax.cc
75 std::vector<float> softmax; in ComputeSoftmax() local
76 softmax.reserve(scores.size()); in ComputeSoftmax()
78 return softmax; in ComputeSoftmax()
97 softmax.push_back(exp_scores[i] / denominator); in ComputeSoftmax()
99 return softmax; in ComputeSoftmax()
/external/tensorflow/tensorflow/core/kernels/sparse/
kernels_gpu.cu.cc
361 T* softmax) { in CalculateRowSoftmax() argument
374 softmax[r_i] = exp_i; in CalculateRowSoftmax()
378 softmax[r_i] = softmax[r_i] / sum_exp; in CalculateRowSoftmax()
385 const T* logits, T* softmax) { in CSRSparseMatrixSoftmaxKernel2D() argument
392 softmax); in CSRSparseMatrixSoftmaxKernel2D()
410 const int* row_ptr, const T* logits, T* softmax) { in CSRSparseMatrixSoftmaxKernel3D() argument
427 softmax); in CSRSparseMatrixSoftmaxKernel3D()
494 const T* softmax, const int grad_softmax_begin, const int grad_softmax_end, in CalculateRowSoftmaxGrad() argument
514 sum_prod += ldg(softmax + i) * ldg(grad_softmax + j); in CalculateRowSoftmaxGrad()
541 gradient[i] = (ldg(grad_softmax + j) - sum_prod) * ldg(softmax + i); in CalculateRowSoftmaxGrad()
[all …]
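
CalculateRowSoftmax above takes a softmax per row over only the stored values of a CSR matrix, walking each row's slice of the row_ptr array. A rough SciPy sketch of that per-row loop (assuming scipy.sparse is available; this is not the CUDA kernel):

import numpy as np
from scipy.sparse import csr_matrix

def csr_row_softmax(m):
    out = m.astype(np.float64).copy()
    for r in range(out.shape[0]):
        lo, hi = out.indptr[r], out.indptr[r + 1]  # this row's range of stored values
        if lo == hi:
            continue                               # row has no stored entries
        row = out.data[lo:hi]
        e = np.exp(row - row.max())                # stabilized exp, as in the kernel
        out.data[lo:hi] = e / e.sum()              # softmax over the nonzeros only
    return out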
softmax_op.cc
76 functor::CSRSparseMatrixSoftmax<Device, T> softmax; in Compute() local
78 ctx, softmax(ctx, *logits_matrix, output_matrix.values().vec<T>())); in Compute()
212 OpKernelContext* ctx, const CSRSparseMatrix& softmax, \
/external/libtextclassifier/native/utils/math/
softmax.cc
77 std::vector<float> softmax; in ComputeSoftmax() local
80 softmax.reserve(scores_size); in ComputeSoftmax()
99 softmax.push_back(exp_scores[i] / denominator); in ComputeSoftmax()
101 return softmax; in ComputeSoftmax()
/external/tensorflow/tensorflow/python/kernel_tests/
softmax_op_test.py
49 softmax = e / np.reshape(np.sum(e, axis=dim), one_only_on_dim)
51 res = np.log(softmax)
53 res = softmax
68 tf_softmax = nn_ops.softmax(np_features, axis=dim, name=name)
215 op = nn_ops.softmax([[[1., 1., 1., 1.], [1., 2., 3., 4.]],
227 nn_ops.softmax(x, axis=0).eval()
235 nn_ops.softmax([1., 2., 3., 4.], axis=dim).eval()
242 nn_ops.softmax(ones, axis=2).eval()
255 y = nn_ops.softmax(x)
/external/tensorflow/tensorflow/core/api_def/base_api/
api_def_Softmax.pbtxt
10 name: "softmax"
15 summary: "Computes softmax activations."
19 $$softmax[i, j] = exp(logits[i, j]) / sum_j(exp(logits[i, j]))$$
api_def_SparseMatrixSoftmax.pbtxt
9 name: "softmax"
12 summary: "Calculates the softmax of a CSRSparseMatrix."
14 Calculate the softmax of the innermost dimensions of a SparseMatrix.
api_def_SparseMatrixSoftmaxGrad.pbtxt
5 name: "softmax"
10 description: "The gradient of `softmax`."
api_def_SparseSoftmax.pbtxt
28 summary: "Applies softmax to a batched N-D `SparseTensor`."
33 This op is equivalent to applying the normal `tf.nn.softmax()` to each innermost
38 (1) Applies `tf.nn.softmax()` to a densified view of each innermost submatrix
api_def_LogSoftmax.pbtxt
15 summary: "Computes log softmax activations."
/external/tensorflow/tensorflow/lite/toco/tflite/
op_version_test.cc
45 std::unique_ptr<SoftmaxOperator> softmax(new SoftmaxOperator()); in TEST() local
48 softmax->inputs.push_back(softmax_input); in TEST()
49 softmax->outputs.push_back(softmax_output); in TEST()
53 model.operators.push_back(std::move(softmax)); in TEST()
/external/tensorflow/tensorflow/core/ops/compat/ops_history_v1/
Softmax.pbtxt
8 name: "softmax"
30 name: "softmax"
SparseMatrixSoftmax.pbtxt
8 name: "softmax"
SparseMatrixSoftmaxGrad.pbtxt
4 name: "softmax"
/external/tensorflow/tensorflow/core/ops/
sparse_csr_matrix_ops.cc
532 ShapeHandle softmax = sparse_matrix_shape_and_type.shape; in __anond96a6a670e02() local
533 TF_RETURN_IF_ERROR(c->WithRankAtLeast(softmax, 2, &softmax)); in __anond96a6a670e02()
534 TF_RETURN_IF_ERROR(c->WithRankAtMost(softmax, 3, &softmax)); in __anond96a6a670e02()
535 if (!c->RankKnown(softmax)) { in __anond96a6a670e02()
545 TF_RETURN_IF_ERROR(c->Merge(softmax, grad_softmax, &softmax)); in __anond96a6a670e02()
547 0, {ShapeAndType{softmax, sparse_matrix_shape_and_type.dtype}}); in __anond96a6a670e02()
/external/tensorflow/tensorflow/compiler/mlir/lite/tests/
post-quantize.mlir
29 …%5 = "tfl.softmax"(%4) {beta = 1.000000e+00 : f32} : (tensor<1x1001x!quant.uniform<u8:f32, 0.02352…
48 // CHECK-NEXT: %[[softmax:.*]] = "tfl.softmax"(%[[reshape]]) {beta = 1.000000e+00 : f32} : (tensor…
49 // CHECK-NEXT: return %[[softmax]] : tensor<1x1001x!quant.uniform<u8:f32, 3.906250e-03>>
/external/libtextclassifier/native/lang_id/common/flatbuffers/
embedding-network.fbs
84 // hidden layer or the final (output / softmax) layer.
93 // is generally used for softmax classification. That's why we say that the
94 // last layer is the "softmax layer".
113 // Hidden layers, followed by the final (softmax) layer.
/external/tensorflow/tensorflow/examples/saved_model/integration_tests/
export_text_rnn_model.py
158 softmax = tf.nn.softmax(logits)
160 next_ids = tf.math.argmax(softmax, axis=1)
/external/libtextclassifier/native/lang_id/
lang-id.cc
169 const std::vector<float> softmax = ComputeSoftmax(scores); in FindLanguages() local
170 const std::vector<int> indices = GetTopKIndices(max_results, softmax); in FindLanguages()
173 softmax[index]); in FindLanguages()
/external/tensorflow/tensorflow/python/keras/
activations.py
44 def softmax(x, axis=-1): function
72 return nn.softmax(x)
activations_test.py
71 f = keras.backend.function([x], [keras.activations.softmax(x)])
80 keras.activations.softmax(x)
84 f = keras.backend.function([x], [keras.activations.softmax(x)])
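
For reference, the Keras activation found in activations.py forwards to nn.softmax for the default axis; a small usage sketch (assuming TensorFlow 2.x):

import tensorflow as tf

x = tf.constant([[1.0, 2.0, 3.0, 4.0]])
y = tf.keras.activations.softmax(x, axis=-1)  # default axis, as in activations.py
print(y.numpy())                              # each row sums to 1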
/external/tensorflow/tensorflow/core/grappler/costs/
analytical_cost_estimator_test.cc
81 auto softmax = ops::Softmax(s.WithOpName("softmax"), logits); in CreateMiniGraph() local
82 auto lsm = ops::Log(s.WithOpName("lsm"), softmax); in CreateMiniGraph()
/external/tensorflow/tensorflow/python/ops/
nn_grad.py
302 softmax = op.outputs[0]
303 sum_channels = math_ops.reduce_sum(grad_softmax * softmax, -1, keepdims=True)
304 return (grad_softmax - sum_channels) * softmax
321 softmax = math_ops.exp(op.outputs[0])
322 return grad - math_ops.reduce_sum(grad, -1, keepdims=True) * softmax
539 softmax = nn_ops.softmax(logits)
544 array_ops.expand_dims(softmax, 2)),
545 axis=1)) * softmax)
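
The nn_grad.py hits spell out the backward formulas: for y = softmax(x), dx = (dy - sum(dy * y, -1, keepdims)) * y, and for y = log_softmax(x), dx = dy - sum(dy, -1, keepdims) * exp(y). A NumPy sketch of the same arithmetic, not the registered TensorFlow gradients:

import numpy as np

def softmax_grad(grad_softmax, softmax):
    # Mirrors the Softmax gradient fragment above.
    sum_channels = np.sum(grad_softmax * softmax, axis=-1, keepdims=True)
    return (grad_softmax - sum_channels) * softmax

def log_softmax_grad(grad, log_softmax_out):
    # Mirrors the LogSoftmax gradient fragment above: softmax = exp(log-softmax output).
    softmax = np.exp(log_softmax_out)
    return grad - np.sum(grad, axis=-1, keepdims=True) * softmax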
