Lines Matching full:backward

39 y[0].backward(); in TEST()
47 y[0].backward(); in TEST()
54 backward({res.sum()}, {}); in TEST()
64 backward({res}, {torch::ones({2, 2})}, {}, true); in TEST()
66 backward({res}, {torch::ones({2, 2})}); in TEST()
87 res.backward(torch::ones({2, 2}), false, true); in TEST()
123 x.backward(grad_output, false, true); in TEST()
160 static variable_list backward( in TEST() function
207 out.backward({}, /*keep_graph=*/true); in TEST()
210 out.backward({}, /*keep_graph=*/true); in TEST()
220 out.backward(); in TEST()
233 ASSERT_THROWS_WITH(z.backward(), "inplace"); in TEST()
283 static variable_list backward( in TEST() function
307 static variable_list backward( in TEST() function
326 res.sum().backward(go, false, true); in TEST()
343 static variable_list backward( in TEST() function
362 res.sum().backward(go, false, true); in TEST()
386 static variable_list backward( in TEST() function
390 // We have to test this within the backward function. in TEST()
470 static variable_list backward( in TEST() function
478 MyFunction::apply(x).backward(torch::ones(1), true, true); in TEST()
488 static variable_list backward( in TEST() function
498 MyFunction::apply(x).backward(); in TEST()
501 MyFunction::apply(x.pow(2)).backward(); in TEST()
504 MyFunction::apply(x).sum().backward(); in TEST()
518 static variable_list backward( in TEST() function
527 UndefinedGrad().apply({MyFunction::apply(x)})[0].backward(); in TEST()
537 static variable_list backward( in TEST() function
546 UndefinedGrad().apply({MyFunction::apply(x)})[0].backward(); in TEST()
556 static variable_list backward(AutogradContext* ctx, variable_list dy) { in TEST() function
579 static variable_list backward( in TEST() function
592 out.sum().backward(); in TEST()
603 static variable_list backward( in TEST() function
614 y.sum().backward(); in TEST()
626 static variable_list backward( in TEST() function
641 out[1].sum().backward(); in TEST()
653 static variable_list backward( in TEST() function
662 (r * x).sum().backward(); in TEST()
672 static variable_list backward( in TEST() function
686 q.sum().backward(); in TEST()
698 static variable_list backward( in TEST() function
723 static variable_list backward( in TEST() function
742 static variable_list backward( in TEST() function
754 y.sum().backward(); in TEST()
764 static variable_list backward( in TEST() function
775 MyFunction::apply(input1).sum().backward(), "expected shape"); in TEST()
786 static variable_list backward( in TEST() function
810 static variable_list backward(AutogradContext*, variable_list grad_output) { in TEST() function
825 static variable_list backward( in TEST() function
837 static variable_list backward(AutogradContext*, variable_list grad_output) { in TEST() function
850 c.backward(torch::ones(c.sizes()), false, false); in TEST()
872 static variable_list backward( in TEST() function
878 out.sum().backward(); in TEST()
886 out.sum().backward(); in TEST()
902 static variable_list backward( in TEST() function
910 apply(ctx->saved_data["x"].toTensor())[0].sum().backward(); in TEST()
919 DeepReenter::apply(v).sum().backward(); in TEST()
930 static variable_list backward(AutogradContext*, variable_list grad) { in TEST() function
945 static variable_list backward( in TEST() function
954 apply(ctx->saved_data["x"].toTensor())[0].sum().backward(); in TEST()
965 v.backward(); in TEST()
967 // All the reentrant tasks should be prioritized over the MyFunction backward in TEST()
990 z.backward(torch::ones({5, 5}), true, true); in TEST()
995 z.backward(torch::ones({5, 5}), true, true); in TEST()
999 z.backward(torch::ones({5, 5}), true, true); in TEST()
1008 z.backward(torch::ones({5, 5}), true, false); in TEST()
1013 z.backward(torch::ones({5, 5}), false, false); in TEST()
1039 out.backward(); in TEST()
1074 out.backward(); in TEST()
1114 out.backward(); in TEST()
1130 static variable_list backward(AutogradContext* ctx, variable_list grad) { in TEST() function
1150 (rx + ry).sum().backward(); in TEST()
1161 z.backward(torch::ones({5, 5}), false, false, {x}); in TEST()
1174 z.backward(torch::ones({5, 5}), false, false, std::vector<Variable>{}), in TEST()
1187 w.backward(torch::ones({5, 5}), false, false, std::vector<Variable>{x, z}); in TEST()
1201 z.backward(torch::ones({5, 5}), std::nullopt, true); in TEST()
1203 warnings.str().find("Using backward() with create_graph=True") != in TEST()
1209 torch::autograd::backward({z}, {torch::ones({5, 5})}, std::nullopt, true); in TEST()
1211 warnings.str().find("Using backward() with create_graph=True") != in TEST()
1345 ASSERT_THROWS_WITH(out1.backward(), "is not implemented"); in assertBasicChecks()
1350 out2.backward(), in assertBasicChecks()
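
All of the matches above come from PyTorch's C++ autograd test suite. Two patterns recur: the static variable_list backward(AutogradContext*, variable_list) hook of a custom function built on torch::autograd::Function, and the Tensor::backward(gradient, retain_graph, create_graph) overloads. For reference, below is a minimal, self-contained sketch of that custom-function pattern; the struct name MySquare and its squaring rule are illustrative placeholders, not taken from the test file.

  #include <torch/torch.h>
  #include <iostream>

  using torch::autograd::AutogradContext;
  using torch::autograd::Function;
  using torch::autograd::variable_list;

  // Minimal custom autograd function: forward computes x * x and backward
  // applies the chain rule, returning grad_output * 2x. The name MySquare
  // is illustrative only.
  struct MySquare : public Function<MySquare> {
    static torch::Tensor forward(AutogradContext* ctx, torch::Tensor x) {
      ctx->save_for_backward({x});
      return x * x;
    }
    static variable_list backward(AutogradContext* ctx, variable_list grad_output) {
      auto x = ctx->get_saved_variables()[0];
      return {grad_output[0] * 2 * x};  // d(x^2)/dx = 2x
    }
  };

  int main() {
    auto x = torch::ones({2, 2}, torch::requires_grad());
    auto y = MySquare::apply(x);
    // With no explicit gradient, backward() on a scalar output uses an
    // implicit gradient of 1; the overloads seen in the matches above also
    // accept (gradient, retain_graph, create_graph).
    y.sum().backward();
    std::cout << x.grad() << std::endl;  // 2x2 tensor filled with 2.0
  }

Calling backward() with no arguments is only valid for scalar outputs, which is why many of the matched lines reduce with .sum() before calling it; the non-scalar calls instead pass an explicit gradient such as torch::ones({2, 2}).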