/external/tensorflow/tensorflow/python/kernel_tests/

  gradient_correctness_test.py
    36  with test_util.AbstractGradientTape(use_tape=use_tape) as tape:
    38  tape.watch(x)
    42  grads = tape.gradient([yexp, yexplog], [x])
    51  with test_util.AbstractGradientTape(use_tape=use_tape) as tape:
    52  tape.watch(x)
    53  dx_dx = tape.gradient(x, x)
    59  with test_util.AbstractGradientTape(use_tape=use_tape) as tape:
    60  tape.watch(x)
    61  dx_dx = tape.gradient(x, x)
    66  with test_util.AbstractGradientTape(use_tape=use_tape) as tape:
    [all …]

  map_ops_test.py
    190  with backprop.GradientTape() as tape:
    194  tape.watch(v)
    198  g = tape.gradient(l, v)
    202  with backprop.GradientTape(persistent=True) as tape:
    210  tape.watch(v)
    211  tape.watch(v2)
    212  tape.watch(v3)
    219  g = tape.gradient(l * 5, v)
    220  g2 = tape.gradient(l2 * 6, v2)
    221  g3 = tape.gradient(l3 * 7, v3)
    [all …]

  relu_op_test.py
    151  with backprop.GradientTape() as tape:
    152  tape.watch(x)
    154  return tape.gradient(y, x)
    169  with backprop.GradientTape() as tape:
    170  tape.watch(x)
    172  return tape.gradient(y, x)
    197  with backprop.GradientTape() as tape:
    198  tape.watch(x)
    200  return tape.gradient(y, x)
    340  with backprop.GradientTape() as tape:
    [all …]

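Note: the kernel-test matches above all exercise the same reverse-mode pattern: open a tape, watch the non-variable inputs, run the computation, then ask for gradients. A minimal sketch of that pattern using the public tf.GradientTape API (the tests use the internal backprop.GradientTape equivalent; the tensor x and tf.exp here are illustrative, not taken from the tests):

    import tensorflow as tf

    x = tf.constant([1.0, 2.0, 3.0])
    with tf.GradientTape() as tape:
        tape.watch(x)            # constant tensors are not watched automatically
        y = tf.exp(x)            # any differentiable computation
    grads = tape.gradient(y, x)  # exp(x) elementwise

When the target is not a scalar, tape.gradient implicitly sums over its elements before differentiating, which is why the gradient above is simply exp(x).
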
/external/tensorflow/tensorflow/c/eager/

  mnist_gradients_testutil.cc
    49  auto tape = new Tape(/*persistent=*/false);  in AddGradModel() local
    50  tape->Watch(inputs[0]);  // Watch x.  in AddGradModel()
    51  tape->Watch(inputs[1]);  // Watch y.  in AddGradModel()
    53  AbstractContextPtr tape_ctx(new TapeContext(ctx, tape, registry));  in AddGradModel()
    56  TF_RETURN_IF_ERROR(tape->ComputeGradient(ctx, /*targets=*/add_outputs,  in AddGradModel()
    62  delete tape;  in AddGradModel()
    73  auto tape = new Tape(/*persistent=*/false);  in MatMulGradModel() local
    74  tape->Watch(inputs[0]);  // Watch x.  in MatMulGradModel()
    75  tape->Watch(inputs[1]);  // Watch y.  in MatMulGradModel()
    77  AbstractContextPtr tape_ctx(new TapeContext(ctx, tape, registry));  in MatMulGradModel()
    [all …]

  gradients_test.cc
    78  auto tape = std::make_unique<Tape>(/*persistent=*/false);  in IdentityNGradModel() local
    79  tape->Watch(inputs[0]);  in IdentityNGradModel()
    80  tape->Watch(inputs[1]);  in IdentityNGradModel()
    83  AbstractContextPtr tape_ctx(new TapeContext(ctx, tape.get(), registry));  in IdentityNGradModel()
    86  TF_RETURN_IF_ERROR(tape->ComputeGradient(ctx,  in IdentityNGradModel()
    196  auto tape = std::make_unique<Tape>(/*persistent=*/false);  in TEST_P() local
    198  &num_retvals, &forward_op, tape.get(), registry);  in TEST_P()
    210  Tape tape(/*persistent=*/false);  in RecordOperationWithNullGradientFunctionModel() local
    211  tape.Watch(inputs[0]);  in RecordOperationWithNullGradientFunctionModel()
    214  tape.RecordOperation(inputs, neg_outputs, nullptr, "Neg");  in RecordOperationWithNullGradientFunctionModel()
    [all …]

/external/tensorflow/tensorflow/python/eager/

  tape.py
    40  def __init__(self, tape):  argument
    41  self._tape = tape
    49  tape = pywrap_tfe.TFE_Py_TapeSetNew(persistent, watch_accessed_variables)
    50  return Tape(tape)
    53  def push_tape(tape):  argument
    55  pywrap_tfe.TFE_Py_TapeSetAdd(tape._tape)  # pylint: disable=protected-access
    58  def watch(tape, tensor):  argument
    60  pywrap_tfe.TFE_Py_TapeWatch(tape._tape, tensor)  # pylint: disable=protected-access
    95  def watch_variable(tape, variable):  argument
    104  pywrap_tfe.TFE_Py_TapeWatchVariable(tape._tape, var)  # pylint: disable=protected-access
    [all …]

  pywrap_tfe_test.py
    93  with backprop.GradientTape(persistent=True) as tape:
    95  tape.watch(a_2_by_2)
    99  dz_dy = tape.gradient(z, [a_2_by_2])[0]
    109  with backprop.GradientTape(persistent=True) as tape:
    112  tape.watch(m)
    116  dz_dy = tape.gradient(z, [m])[0]
    145  with backprop.GradientTape(persistent=True) as tape:
    146  tape.watch(a_2_by_2)
    147  tape.watch(b_2_by_2)
    151  dz1_dy = tape.gradient(z1, [a_2_by_2])[0]
    [all …]

  backprop_test.py
    29  from tensorflow.python.eager import tape as tape_lib
    407  with backprop.GradientTape(persistent=True) as tape:
    408  tape.watch(a_2_by_2)
    409  dy_dy = tape.gradient(a_2_by_2, [a_2_by_2])[0]
    416  with backprop.GradientTape(persistent=True) as tape:
    417  tape.watch(a_2_by_2)
    418  dy_dy = tape.gradient([a_2_by_2, a_2_by_2], [a_2_by_2])[0]
    1360  with backprop.GradientTape() as tape:
    1361  tape.watch(x)
    1365  return self.evaluate(tape.gradient(y, x))
    [all …]

  pywrap_tfe.h
    159  void TFE_Py_TapeSetRemove(PyObject* tape);
    162  void TFE_Py_TapeSetAdd(PyObject* tape);
    179  void TFE_Py_TapeWatch(PyObject* tape, PyObject* tensor);
    246  void TFE_Py_TapeWatchVariable(PyObject* tape, PyObject* variable);
    253  PyObject* TFE_Py_TapeGradient(PyObject* tape, PyObject* target,
    285  PyObject* TFE_Py_TapeWatchedVariables(PyObject* tape);

  forwardprop_test.py
    33  from tensorflow.python.eager import tape as tape_lib
    114  with backprop.GradientTape() as tape:
    115  tape.watch(params)
    117  return tape.gradient(
    147  with backprop.GradientTape() as tape:
    148  tape.watch(primals)
    151  return acc.jvp(tape.gradient(f_out, primals))
    694  with backprop.GradientTape() as tape:
    697  tape.watch(c)
    709  self.assertIsNone(tape.gradient(d, c))
    [all …]

  function_gradients_test.py
    108  with backprop.GradientTape(persistent=persistent) as tape:
    109  tape.watch(x)
    114  tape_dy = tape.gradient(y, x)
    134  with backprop.GradientTape() as tape:
    135  tape.watch(primal)
    137  return tape.gradient(primal_out, primal)
    158  with backprop.GradientTape() as tape:
    160  g, = tape.gradient(primal_out, tape.watched_variables())
    186  with backprop.GradientTape(persistent=True) as tape:
    188  tape.watch(start)
    [all …]

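Note: several of the eager tests above rely on persistent tapes (so gradient can be called more than once) and on tape.watched_variables(). A short sketch of both, using only the public API; the variable v and the values are illustrative, not taken from the tests:

    import tensorflow as tf

    v = tf.Variable(3.0)   # trainable variables are watched automatically
    x = tf.constant(2.0)

    with tf.GradientTape(persistent=True) as tape:
        tape.watch(x)      # constant tensors still need an explicit watch
        y = v * x
        z = y * y

    dy_dx = tape.gradient(y, x)         # = v = 3.0
    dz_dx = tape.gradient(z, x)         # = 2 * y * v = 36.0
    watched = tape.watched_variables()  # (v,) -- variables only, not watched tensors
    del tape  # a persistent tape holds its recorded state until the reference is dropped
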
/external/llvm-project/clang/test/SemaCXX/

  constexpr-turing-cxx2a.cpp
    8  bool tape;  member
    37  Tape *tape = new Tape;  in run() local
    42  auto [val, dir, next_state] = tm[state][tape->val];  in run()
    43  tape->val = val;  in run()
    44  tape = (dir == L ? tape->left() : tape->right());  in run()
    48  delete tape;  in run()

  constexpr-turing.cpp
    10  bool tape;  member
    35  constexpr unsigned run(const State *tm, const Tape &tape, unsigned state) {  in run() argument
    37  run(tm, move(update(tape, tm[state][tape.val].tape),  in run()
    38  tm[state][tape.val].dir),  in run()
    39  tm[state][tape.val].next) + 1;  in run()

/external/tensorflow/tensorflow/python/framework/experimental/

  unified_api_test.py
    35  from tensorflow.python.framework.experimental import tape as tape_lib
    91  with tape_lib.GradientTape() as tape:
    92  tape.watch(a)
    93  tape.watch(b)
    95  grads = tape.gradient(result, [a, b])
    145  with tape_lib.GradientTape() as tape:
    146  tape.watch(t)
    148  grads = tape.gradient(result, t)
    195  with tape_lib.GradientTape() as tape:
    196  tape.watch(a)
    [all …]

/external/clang/test/SemaCXX/

  constexpr-turing.cpp
    10  bool tape;  member
    35  constexpr unsigned run(const State *tm, const Tape &tape, unsigned state) {  in run() argument
    37  run(tm, move(update(tape, tm[state][tape.val].tape),  in run()
    38  tm[state][tape.val].dir),  in run()
    39  tm[state][tape.val].next) + 1;  in run()

/external/tensorflow/tensorflow/c/experimental/gradients/

  nn_grad_test.cc
    44  Tape tape(/*persistent=*/false);  in ReluGradModel() local
    45  tape.Watch(inputs[0]);  in ReluGradModel()
    47  AbstractContextPtr tape_ctx(new TapeContext(ctx, &tape, registry));  in ReluGradModel()
    51  TF_RETURN_IF_ERROR(tape.ComputeGradient(ctx, /*targets=*/temp_outputs,  in ReluGradModel()
    84  Tape tape(/*persistent=*/false);  in SparseSoftmaxCrossEntropyWithLogitsGradModel() local
    85  tape.Watch(inputs[0]);  // Watch score.  in SparseSoftmaxCrossEntropyWithLogitsGradModel()
    86  tape.Watch(inputs[1]);  // Watch label.  in SparseSoftmaxCrossEntropyWithLogitsGradModel()
    88  AbstractContextPtr tape_ctx(new TapeContext(ctx, &tape, registry));  in SparseSoftmaxCrossEntropyWithLogitsGradModel()
    93  TF_RETURN_IF_ERROR(tape.ComputeGradient(ctx, /*targets=*/temp_outputs,  in SparseSoftmaxCrossEntropyWithLogitsGradModel()
    114  Tape tape(/*persistent=*/false);  in BiasAddGradModel() local
    [all …]

  custom_gradient_test.cc
    71  Tape tape(/*persistent=*/false);  in ExpWithPassThroughGrad() local
    72  tape.Watch(inputs[0]);  // Watch x.  in ExpWithPassThroughGrad()
    77  tape.RecordOperation(inputs, exp_outputs, gradient_function.release());  in ExpWithPassThroughGrad()
    78  TF_RETURN_IF_ERROR(tape.ComputeGradient(ctx,  in ExpWithPassThroughGrad()

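Note: judging from the matched lines, ExpWithPassThroughGrad in custom_gradient_test.cc records an Exp op on the tape with a custom gradient function that simply passes the incoming gradient through. The Python-level counterpart of that idea is the public tf.custom_gradient decorator; the sketch below is a hypothetical analogue, not the code the C++ test runs (which goes through Tape::RecordOperation directly):

    import tensorflow as tf

    @tf.custom_gradient
    def exp_with_pass_through_grad(x):
        y = tf.exp(x)
        def grad(upstream):
            # Pass the incoming gradient through unchanged instead of
            # multiplying by exp(x).
            return upstream
        return y, grad

    x = tf.constant(1.0)
    with tf.GradientTape() as tape:
        tape.watch(x)
        y = exp_with_pass_through_grad(x)
    print(tape.gradient(y, x))  # 1.0 rather than exp(1.0)
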
/external/tensorflow/tensorflow/python/ops/

  array_ops_test.py
    40  with backprop.GradientTape() as tape:
    41  tape.watch(x)
    43  grad_a = tape.gradient(a, x)
    44  with backprop.GradientTape() as tape:
    45  tape.watch(x)
    47  grad_b = tape.gradient(b, x)

/external/tensorflow/tensorflow/python/distribute/

  custom_training_loop_gradient_test.py
    80  with backprop.GradientTape() as tape:
    81  tape.watch(x)  # Manually watch non-variable tensors.
    83  grads = tape.gradient(y, x)
    107  with backprop.GradientTape() as tape:
    108  tape.watch(x)  # Manually watch non-variable tensors.
    110  grads = tape.gradient(y, x)
    140  with backprop.GradientTape() as tape:
    142  return tape.gradient(y, x)

/external/tensorflow/tensorflow/python/keras/integration_test/

  forwardprop_test.py
    61  with tf.GradientTape() as tape:
    62  tape.watch(params)
    64  return tape.gradient(
    75  with tf.GradientTape() as tape:
    76  tape.watch(primals)
    79  return acc.jvp(tape.gradient(f_out, primals))
    211  with tf.GradientTape() as tape:
    213  tape.watch(input_value)
    215  jac_back = tape.jacobian(output,
    252  with tf.GradientTape() as tape:
    [all …]

  gradients_test.py
    49  with tf.GradientTape(persistent=True) as tape:
    50  tape.watch(vars_to_grad)
    54  grads_re = tape.gradient(out_re, vars_to_grad)
    55  grads = tape.gradient(out, vars_to_grad)
    96  with tf.GradientTape() as tape:
    97  tape.watch(x)
    99  return tape.batch_jacobian(y, x)

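Note: the forwardprop tests above combine a forward-mode accumulator with a backward tape (acc.jvp of tape.gradient), the usual forward-over-reverse recipe for Hessian-vector products. A minimal sketch with the public tf.autodiff.ForwardAccumulator API; the cubic loss and the values of x and v are stand-ins, not taken from the tests:

    import tensorflow as tf

    x = tf.constant([1.0, 2.0])
    v = tf.constant([1.0, 0.0])  # direction for the Hessian-vector product

    with tf.autodiff.ForwardAccumulator(primals=x, tangents=v) as acc:
        with tf.GradientTape() as tape:
            tape.watch(x)
            loss = tf.reduce_sum(x ** 3)   # stand-in for a real model loss
        backward = tape.gradient(loss, x)  # reverse-mode gradient, taken inside acc
    hvp = acc.jvp(backward)                # forward-mode JVP of that gradient = H @ v

tape.jacobian and tape.batch_jacobian, used in the matches above, are likewise public GradientTape methods for full (per-example) Jacobians rather than summed gradients.
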
/external/tensorflow/tensorflow/python/keras/distribute/

  custom_training_loop_models_test.py
    79  with backprop.GradientTape() as tape:
    82  grads = tape.gradient(loss, model.variables)
    104  with backprop.GradientTape() as tape:
    107  grads = tape.gradient(loss, model.variables)
    142  with backprop.GradientTape() as tape:
    145  grads = tape.gradient(loss, model.variables)
    167  with backprop.GradientTape() as tape:
    170  grads = tape.gradient(loss, model.variables)
    199  with backprop.GradientTape() as tape:
    202  grads = tape.gradient(loss, model.variables)
    [all …]

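Note: every match in custom_training_loop_models_test.py has the same training-step shape: compute the loss under the tape, differentiate with respect to the model's variables, and apply the result. A generic sketch of that step; model, optimizer, loss_fn, x and y are placeholders, and it differentiates with respect to model.trainable_variables where the test above uses model.variables:

    import tensorflow as tf

    def train_step(model, optimizer, loss_fn, x, y):
        with tf.GradientTape() as tape:
            predictions = model(x, training=True)
            loss = loss_fn(y, predictions)
        # Variables created by the model are watched automatically.
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))
        return loss
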
/external/tensorflow/tensorflow/compiler/tests/

  eager_test.py
    61  with backprop.GradientTape(persistent=True) as tape:
    62  tape.watch(x)
    63  tape.watch(y)
    65  da_dx = tape.gradient(a, x)
    66  da_dy = tape.gradient(a, y)
    267  with backprop.GradientTape() as tape:
    269  dy_dx = tape.gradient(y, x)
    281  with backprop.GradientTape() as tape:
    284  dy_dx = tape.gradient(y, embedding_matrix)
    491  with backprop.GradientTape() as tape:
    [all …]

/external/tensorflow/tensorflow/python/keras/tests/

  tracking_util_test.py
    116  with backprop.GradientTape() as tape:
    119  gradients = tape.gradient(loss, variables)
    124  with backprop.GradientTape() as tape:
    127  gradients = tape.gradient(loss, variables)
    206  with backprop.GradientTape() as tape:
    209  gradients = tape.gradient(loss, variables)
    283  with backprop.GradientTape() as tape:
    286  gradients = tape.gradient(loss, variables)
    305  with backprop.GradientTape() as tape:
    308  gradients = tape.gradient(loss, variables)
    [all …]

  memory_test.py
    69  with backprop.GradientTape() as tape:
    72  tape.gradient(result, net.variables)
    74  del tape