Lines matching full:dropout
74 o_ref = torch.dropout(x_ref, p, train)
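For context, `torch.dropout(input, p, train)` is the low-level binding that `torch.nn.functional.dropout` dispatches to, which is why the test can use it as a reference: under the same RNG state both paths should generate the same mask. A minimal sketch of that comparison (the tensor shape and parameters here are illustrative, not taken from the test):

    import torch
    import torch.nn.functional as F

    x = torch.randn(10, 10)
    p, train = 0.5, True

    torch.manual_seed(0)
    out = F.dropout(x, p, training=train)

    torch.manual_seed(0)
    o_ref = torch.dropout(x, p, train)  # low-level reference; same seed -> same mask

    assert torch.equal(out, o_ref)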
82 self.assertRaises(ValueError, lambda: nn.Dropout(-0.1))
83 self.assertRaises(ValueError, lambda: nn.Dropout(1.1))
90 self.assertRaises(ValueError, lambda: F.dropout(v, -0.1))
91 self.assertRaises(ValueError, lambda: F.dropout(v, 1.1))
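These four assertions pin down the valid range of the dropout probability: values outside [0, 1] raise ValueError in both the module and the functional form. A sketch of that behavior (the printed error text is paraphrased from memory of PyTorch's message, so treat it as approximate):

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    try:
        nn.Dropout(1.1)
    except ValueError as e:
        print(e)  # e.g. "dropout probability has to be between 0 and 1, but got 1.1"

    v = torch.ones(1)
    try:
        F.dropout(v, -0.1)
    except ValueError as e:
        print(e)  # same check applies to the functional form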
129 … # In this test, we verify that dropout preserves the layout and data for different memory formats.
130 # We check whether we get the same values for the output of dropout when
131 # the probability of dropout is 0 or very close to 0.
174 self._test_dropout(nn.Dropout, device, input)
176 self._test_dropout_discontiguous(nn.Dropout, device)
178 nn.Dropout, device, memory_format=torch.channels_last
181 self._test_dropout_stride_mean_preserve(nn.Dropout, device)
185 self._test_dropout(nn.Dropout, device, input)
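The memory-format tests above rest on a simple fact: with p=0 every element survives and is scaled by 1/(1-0)=1, so the output should equal the input and keep its layout. A sketch of that invariant for channels_last (shapes are illustrative):

    import torch
    import torch.nn as nn

    x = torch.randn(2, 3, 4, 4).to(memory_format=torch.channels_last)
    out = nn.Dropout(p=0.0)(x)

    assert torch.equal(out, x)                                   # data preserved
    assert out.is_contiguous(memory_format=torch.channels_last)  # layout preserved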
187 def _test_dropoutNd_no_batch(self, dropout, input):
190 res_no_batch = dropout(input)
193 res_batched = dropout(input_clone.unsqueeze(0)).squeeze(0)
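This no-batch-dim helper relies on the Nd dropout variants accepting unbatched input and internally unsqueezing/squeezing a batch dimension, so under the same RNG state the two paths should agree. A sketch using `nn.Dropout1d` (the helper itself is parameterized over the dropout module and input):

    import torch
    import torch.nn as nn

    dropout = nn.Dropout1d(p=0.5)
    x = torch.randn(3, 16)  # unbatched (C, L) input

    torch.manual_seed(0)
    res_no_batch = dropout(x.clone())

    torch.manual_seed(0)
    res_batched = dropout(x.clone().unsqueeze(0)).squeeze(0)

    assert torch.equal(res_no_batch, res_batched)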
197 def _test_dropoutNd_channel_zero(self, dropout, input):
204 result = dropout(input)
272 UserWarning, "assuming that channel-wise 1D dropout behavior is desired"
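The UserWarning asserted here is raised when `dropout2d` receives a 3-D input: for backward compatibility it is interpreted as batched (N, C, L) rather than unbatched (C, H, W), so 1-D channel-wise dropout is applied instead (behavior as of the PyTorch versions that carry this test). A sketch of triggering it:

    import warnings
    import torch
    import torch.nn.functional as F

    x = torch.randn(2, 3, 4)  # 3-D: ambiguous between (N, C, L) and (C, H, W)
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        F.dropout2d(x, p=0.5, training=True)
    assert "channel-wise 1D dropout" in str(caught[0].message)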
314 out = torch.nn.functional.dropout(x)
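Note that `torch.nn.functional.dropout` defaults to p=0.5 with training=True, so the bare call on line 314 drops roughly half the elements and scales the survivors by 1/(1-0.5)=2. A quick sketch of those defaults:

    import torch
    import torch.nn.functional as F

    x = torch.ones(1000)
    out = F.dropout(x)  # same as F.dropout(x, p=0.5, training=True)
    print((out == 0).float().mean())  # roughly 0.5
    print(out[out != 0].unique())     # tensor([2.]) -- survivors scaled by 1/(1-p)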