| /external/tensorflow/tensorflow/python/distribute/ |
| D | mirrored_strategy_test.py |
      67   distribution=[
      78   def testMinimizeLoss(self, distribution):   [argument]
      80   self._test_minimize_loss_eager(distribution)
      82   self._test_minimize_loss_graph(distribution)
      84   def testReplicaId(self, distribution):   [argument]
      85   self._test_replica_id(distribution)
      87   def testNumReplicasInSync(self, distribution):   [argument]
      88   self.assertEqual(2, distribution.num_replicas_in_sync)
      90   def testCallAndMergeExceptions(self, distribution):   [argument]
      91   self._test_call_and_merge_exceptions(distribution)
      [all …]
|
| D | custom_training_loop_input_test.py |
      79   distribution=strategy_combinations.all_strategies,
      82   def testConstantNumpyInput(self, distribution):   [argument]
      90   outputs = distribution.experimental_local_results(
      91   distribution.run(computation, args=(x,)))
      95   constant_op.constant(4., shape=(distribution.num_replicas_in_sync)),
      100  distribution=strategy_combinations.all_strategies,
      103  def testStatefulExperimentalRunAlwaysExecute(self, distribution):   [argument]
      104  with distribution.scope():
      114  distribution.run(assign_add)
      122  distribution=strategy_combinations.strategies_minus_tpu,
      [all …]
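The matched lines above show the core custom-training-loop pattern: run a computation on every replica with `distribution.run` and unpack the result with `experimental_local_results`. Below is a minimal sketch of that pattern; the strategy choice and the toy `computation` are illustrative assumptions, not code from the test file.

# Minimal sketch (not from the indexed test file): run a computation on every
# replica and collect the per-replica results.
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()  # assumes at least one local device

@tf.function
def computation(x):
  # Runs once per replica; every replica sees the same constant here.
  return x * 2.0

x = tf.constant(3.0)
per_replica = strategy.run(computation, args=(x,))
# experimental_local_results unpacks the PerReplica value into a plain tuple.
print(strategy.experimental_local_results(per_replica))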
|
| D | one_device_strategy_test.py |
      31   distribution=[
      40   def testMinimizeLoss(self, distribution):   [argument]
      42   self._test_minimize_loss_eager(distribution)
      44   self._test_minimize_loss_graph(distribution)
      46   def testReplicaId(self, distribution):   [argument]
      47   self._test_replica_id(distribution)
      49   def testCallAndMergeExceptions(self, distribution):   [argument]
      50   self._test_call_and_merge_exceptions(distribution)
      52   def testReplicateDataset(self, distribution):   [argument]
      62   self._test_input_fn_iterable(distribution, input_fn, expected_values)
      [all …]
|
| D | mirrored_variable_test.py |
      63   distribution=[
      103  def testVariableInFuncGraph(self, distribution):   [argument]
      110  with func_graph.FuncGraph("fg").as_default(), distribution.scope():
      112  v2 = distribution.extended.call_for_each_replica(model_fn)
      114  self._test_mv_properties(v1, "foo:0", distribution)
      115  self._test_mv_properties(v2, "bar:0", distribution)
      117  def testVariableWithTensorInitialValueInFunction(self, distribution):   [argument]
      132  return distribution.experimental_local_results(
      133  distribution.extended.call_for_each_replica(model_fn))
      137  def testSingleVariable(self, distribution):   [argument]
      [all …]
|
| D | vars_test.py |
      52   # distribution=[
      59   distribution=[
      70   distribution=[
      85   def testAssign(self, distribution, experimental_run_tf_function):   [argument]
      94   return test_util.gather(distribution, distribution.run(update_fn))
      112  with distribution.scope():
      124  def testAssignOnWriteVar(self, distribution, experimental_run_tf_function):   [argument]
      126  with distribution.scope():
      139  return test_util.gather(distribution, distribution.run(update_fn))
      157  with distribution.scope():
      [all …]
|
| D | values_test.py |
      58   distribution=[
      72   distribution=(strategy_combinations.all_strategies_minus_default +
      76   def testMakeDistributedValueFromTensor(self, distribution):   [argument]
      85   distribution.experimental_distribute_values_from_function(value_fn))
      87   ds_test_util.gather(distribution, distributed_values),
      88   constant_op.constant(1., shape=(distribution.num_replicas_in_sync)))
      92   distribution=(strategy_combinations.all_strategies_minus_default +
      96   def testMakeDistributedValueSingleNumpyArrayConstant(self, distribution):   [argument]
      105  distribution.experimental_distribute_values_from_function(value_fn))
      107  ds_test_util.gather(distribution, distributed_values).numpy(),
      [all …]
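These testMakeDistributedValue* cases build per-replica values with `experimental_distribute_values_from_function`. A small sketch of that call, assuming a local MirroredStrategy and a hypothetical `value_fn` (neither taken from values_test.py):

# Illustrative sketch of experimental_distribute_values_from_function; the
# value_fn below is hypothetical, not the one used in values_test.py.
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()

def value_fn(ctx):
  # ctx is a tf.distribute.experimental.ValueContext; return one value per replica.
  return tf.constant(ctx.replica_id_in_sync_group)

distributed_values = (
    strategy.experimental_distribute_values_from_function(value_fn))
print(strategy.experimental_local_results(distributed_values))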
|
| D | input_lib_test.py |
      287  distribution=[
      290  def testMultiDeviceIterInitialize(self, distribution):   [argument]
      300  dataset_fn(distribute_lib.InputContext()), input_workers, distribution)
      316  distribution=[
      321  def testOneDeviceCPU(self, input_type, api_type, iteration_type, distribution,   [argument]
      330  distribution.extended.experimental_enable_get_next_as_optional = (
      334  expected_values, distribution)
      342  distribution=[strategy_combinations.multi_worker_mirrored_2x1_cpu],
      345  distribution, enable_get_next_as_optional):   [argument]
      353  distribution.extended.experimental_enable_get_next_as_optional = (
      [all …]
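The input_lib tests revolve around feeding a `tf.data.Dataset` to replicas. A hedged sketch of the public API that corresponds to this pattern (the dataset and batch size below are made up for illustration, not taken from input_lib_test.py):

# Sketch of distributing a dataset across replicas.
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()
dataset = tf.data.Dataset.range(8).batch(4)  # global batch size 4
dist_dataset = strategy.experimental_distribute_dataset(dataset)

for batch in dist_dataset:
  # Each element is a per-replica value; show the piece each device received.
  print(strategy.experimental_local_results(batch))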
|
| D | metrics_v1_test.py |
      73   distribution=[
      84   distribution=[
      95   def _test_metric(self, distribution, dataset_fn, metric_fn, expected_fn):   [argument]
      96   with ops.Graph().as_default(), distribution.scope():
      97   iterator = distribution.make_input_fn_iterator(lambda _: dataset_fn())
      98   if strategy_test_lib.is_tpu_strategy(distribution):
      100  value, update = distribution.extended.call_for_each_replica(
      103  return distribution.group(update)
      105  ctx = distribution.extended.experimental_run_steps_on_iterator(
      106  step_fn, iterator, iterations=distribution.extended.steps_per_run)
      [all …]
|
| D | moving_averages_test.py |
      44   distribution=all_distributions, mode=["graph"])
      47   distribution=all_distributions, mode=["eager"], use_function=[True, False])
      53   def testReplicaModeWithoutZeroDebias(self, distribution):   [argument]
      65   with distribution.scope():
      66   var, assign = distribution.extended.call_for_each_replica(replica_fn)
      69   self.evaluate(distribution.experimental_local_results(assign))
      80   def testReplicaMode(self, distribution):   [argument]
      91   with distribution.scope():
      92   var, assign_op = distribution.extended.call_for_each_replica(replica_fn)
      95   self.evaluate(distribution.experimental_local_results(assign_op))
      [all …]
|
| D | input_lib_type_spec_test.py |
      51   distribution=[
      56   def testTypeSpec(self, input_type, distribution,   [argument]
      63   distribution.extended.experimental_enable_get_next_as_optional = (
      66   dist_dataset = distribution.experimental_distribute_dataset(dataset)
      67   with distribution.scope():
      83   distribution=[
      89   distribution, enable_get_next_as_optional):   [argument]
      96   distribution.extended.experimental_enable_get_next_as_optional = (
      99   dist_dataset = distribution.experimental_distribute_dataset(dataset)
      100  with distribution.scope():
      [all …]
|
| D | distributed_variable_test.py |
      63   distribution=[
      74   distribution=[
      100  def testExtendsVariable(self, distribution, synchronization, aggregation):   [argument]
      101  with distribution.scope():
      106  def testCheckpointing(self, distribution, synchronization, aggregation, mode):   [argument]
      108  if (isinstance(distribution,
      113  with distribution.scope():
      139  def testTraceback(self, distribution, synchronization, aggregation):   [argument]
      142  with distribution.scope():
      158  def testSelectReplica(self, distribution, synchronization, aggregation):   [argument]
      [all …]
|
| D | tf_function_test.py |
      15   """Tests for tf.function + distribution strategies."""
      48   distribution=strategy_combinations.all_strategies,
      53   self, distribution, run_functions_eagerly):   [argument]
      57   worker = distribution.extended.worker_devices[0]
      62   with distribution.scope():
      74   distribution=strategy_combinations.all_strategies,
      79   self, distribution, run_functions_eagerly):   [argument]
      83   worker = distribution.extended.worker_devices[0]
      88   with distribution.scope():
      105  distribution=strategy_combinations.all_strategies,
      [all …]
|
| /external/rust/crates/criterion/src/stats/ |
| D | tuple.rs |
      3    use crate::stats::Distribution;
      14   /// A tuple of distributions: `(Distribution<A>, Distribution<B>, ..)`
      42   type Distributions = (Distribution<A>,);
      46   impl<A> TupledDistributions for (Distribution<A>,)   [implementation]
      70   fn complete(self) -> (Distribution<A>,) {   [in complete()]
      71   (Distribution(self.0.into_boxed_slice()),)   [in complete()]
      80   type Distributions = (Distribution<A>, Distribution<B>);
      84   impl<A, B> TupledDistributions for (Distribution<A>, Distribution<B>)   [implementation]
      112  fn complete(self) -> (Distribution<A>, Distribution<B>) {   [in complete()]
      114  Distribution(self.0.into_boxed_slice()),   [in complete()]
      [all …]
|
| /external/tensorflow/tensorflow/python/ops/ |
| D | nn_loss_scaling_utilities_test.py |
      69   distribution=[
      73   def testComputeAverageLossDefaultGlobalBatchSize(self, distribution):   [argument]
      80   with distribution.scope():
      81   per_replica_losses = distribution.run(
      83   loss = distribution.reduce("SUM", per_replica_losses, axis=None)
      88   distribution=[
      92   def testComputeAverageLossSampleWeights(self, distribution):   [argument]
      93   with distribution.scope():
      95   per_replica_losses = distribution.run(
      99   loss = distribution.reduce("SUM", per_replica_losses, axis=None)
      [all …]
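The pattern under test here is loss scaling for data-parallel training: each replica averages its per-example loss over the global batch size, then the per-replica losses are SUM-reduced. An illustrative sketch, assuming a single-host MirroredStrategy and a made-up per-example loss tensor (neither comes from the test file):

# Average the per-example loss over the global batch on each replica, then
# SUM-reduce across replicas.
import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()
GLOBAL_BATCH_SIZE = 4

def replica_step(per_example_loss):
  # Divide by the global batch size so that summing across replicas yields the
  # true mean loss over the whole batch.
  return tf.nn.compute_average_loss(per_example_loss,
                                    global_batch_size=GLOBAL_BATCH_SIZE)

per_example_loss = tf.constant([1.0, 2.0, 3.0, 4.0])
per_replica_losses = strategy.run(replica_step, args=(per_example_loss,))
loss = strategy.reduce(tf.distribute.ReduceOp.SUM, per_replica_losses, axis=None)
print(float(loss))  # 2.5 on a single replica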
|
| /external/tensorflow/tensorflow/python/ops/distributions/ |
| D | transformed_distribution.py |
      15   """A Transformed Distribution class."""
      26   from tensorflow.python.ops.distributions import distribution as distribution_lib
      118  class TransformedDistribution(distribution_lib.Distribution):
      119  """A Transformed Distribution.
      121  A `TransformedDistribution` models `p(y)` given a base distribution `p(x)`,
      124  distribution is typically an instance of the `Distribution` class.
      133  `Distribution` associated with a random variable (rv) `X`.
      135  Write `cdf(Y=y)` for an absolutely continuous cumulative distribution function
      147  Programmatically: `bijector.forward(distribution.sample(...))`
      152  Programmatically: `(distribution.log_prob(bijector.inverse(y))
      [all …]
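The two "Programmatically:" lines quoted above summarize how a transformed distribution samples and scores points: sample by pushing base samples through the bijector's forward map, and evaluate log-density by pulling the point back through the inverse map and adding the log-Jacobian correction. A hand-rolled NumPy illustration of those relations for Y = exp(X) with X ~ Normal(0, 1); this is an independent sketch of the math, not the TransformedDistribution implementation:

# sample:   y = forward(x) = exp(x),  x ~ p(x)
# log prob: log p(y) = log p_X(inverse(y)) + log |d inverse(y) / dy|
import numpy as np

def normal_log_prob(x):
  return -0.5 * x**2 - 0.5 * np.log(2.0 * np.pi)

forward = np.exp                  # plays the role of bijector.forward
inverse = np.log                  # plays the role of bijector.inverse
def inverse_log_det_jacobian(y):  # log |d log(y)/dy| = -log(y)
  return -np.log(y)

# Sampling: push base-distribution samples through the forward map.
x = np.random.default_rng(0).standard_normal(5)
y = forward(x)

# Density: pull the point back and correct by the Jacobian term.
log_prob_y = normal_log_prob(inverse(y)) + inverse_log_det_jacobian(y)
print(y, log_prob_y)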
|
| /external/python/setuptools/pkg_resources/ |
| D | api_tests.txt |
      7    A "Distribution" is a collection of files that represent a "Release" of a
      12   >>> from pkg_resources import Distribution
      13   >>> Distribution(project_name="Foo", version="1.2")
      19   >>> dist = Distribution(
      56   >>> Distribution(version='1.0') == Distribution(version='1.0')
      58   >>> Distribution(version='1.0') == Distribution(version='1.1')
      60   >>> Distribution(version='1.0') < Distribution(version='1.1')
      66   >>> Distribution(project_name="Foo",version="1.0") == \
      67   ... Distribution(project_name="Foo",version="1.0")
      70   >>> Distribution(project_name="Foo",version="1.0") == \
      [all …]
|
| /external/tensorflow/tensorflow/core/kernels/ |
| D | random_op_gpu.h |
      31   template <class Distribution, bool VariableSamplesPerOutput>
      34   template <class Distribution>
      35   struct FillPhiloxRandomKernel<Distribution, false> {
      36   typedef typename Distribution::ResultElementType T;
      39   Distribution dist);
      42   template <class Distribution>
      43   struct FillPhiloxRandomKernel<Distribution, true> {
      44   typedef typename Distribution::ResultElementType T;
      47   int64 size, Distribution dist);
      139  // distribution. Each output takes a fixed number of samples.
      [all …]
|
| D | random_op_cpu.h |
      60   template <typename Device, class Distribution>
      62   typedef typename Distribution::ResultElementType T;
      65   int64_t size, Distribution dist) {   [in operator()]
      71   "not support this device or random distribution yet."));   [in operator()]
      76   template <class Distribution, bool VariableSamplesPerOutput>
      79   // Specialization for distribution that takes a fixed number of samples for
      81   template <class Distribution>
      82   struct FillPhiloxRandomTask<Distribution, false> {
      83   typedef typename Distribution::ResultElementType T;
      85   int64_t start_group, int64_t limit_group, Distribution dist) {
      [all …]
|
| /external/tensorflow/tensorflow/tools/api/golden/v1/ |
| D | tensorflow.distributions.pbtxt |
      5    mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      9    mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      13   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      17   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      21   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      24   name: "Distribution"
      25   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      29   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      33   mtype: "<class \'tensorflow.python.ops.distributions.distribution.ReparameterizationType\'>"
      37   mtype: "<class \'tensorflow.python.ops.distributions.distribution._DistributionMeta\'>"
      [all …]
|
| /external/python/setuptools/docs/ |
| D | pkg_resources.rst |
      33   Eggs are a distribution format for Python modules, similar in concept to
      35   However, unlike a pure distribution format, eggs can also be installed and
      42   a distribution to co-exist in the same Python installation, with individual
      60   distribution
      63   importable distribution
      67   pluggable distribution
      68   An importable distribution whose filename unambiguously identifies its
      82   necessarily active. More than one distribution (i.e. release version) for
      87   ``sys.path``. At most one distribution (release version) of a given
      99   default version of a distribution that is available to software that
      [all …]
|
| /external/apache-commons-math/src/main/java/org/apache/commons/math/distribution/ |
| D | PoissonDistributionImpl.java |
      17   package org.apache.commons.math.distribution;
      51   /** Distribution used to compute normal approximation. */
      55   * Holds the Poisson mean for the distribution.
      73   * Create a new Poisson distribution with the given the mean. The mean value
      84   * Create a new Poisson distribution with the given mean, convergence criterion
      99   * Create a new Poisson distribution with the given mean and convergence criterion.
      111  * Create a new Poisson distribution with the given mean and maximum number of iterations.
      124  * Create a new Poisson distribution with the given the mean. The mean value
      128  * @param z a normal distribution used to compute normal approximations.
      141  * Get the Poisson mean for the distribution.
      [all …]
|
| /external/apache-commons-math/src/main/java/org/apache/commons/math/random/ |
| D | RandomDataImpl.java |
      28   import org.apache.commons.math.distribution.BetaDistributionImpl;
      29   import org.apache.commons.math.distribution.BinomialDistributionImpl;
      30   import org.apache.commons.math.distribution.CauchyDistributionImpl;
      31   import org.apache.commons.math.distribution.ChiSquaredDistributionImpl;
      32   import org.apache.commons.math.distribution.ContinuousDistribution;
      33   import org.apache.commons.math.distribution.FDistributionImpl;
      34   import org.apache.commons.math.distribution.GammaDistributionImpl;
      35   import org.apache.commons.math.distribution.HypergeometricDistributionImpl;
      36   import org.apache.commons.math.distribution.IntegerDistribution;
      37   import org.apache.commons.math.distribution.PascalDistributionImpl;
      [all …]
|
| D | EmpiricalDistribution.java |
      30   * empirical probability distribution</a> -- a probability distribution derived
      32   * of the population distribution that the data come from.<p>
      34   * <i>distribution digests</i>, that describe empirical distributions and
      36   * <li>loading the distribution from a file of observed data values</li>
      41   * <li>generating random values from the distribution</li>
      46   * generated will follow the distribution of the values in the file.</p>
      53   * Computes the empirical distribution from the provided
      61   * Computes the empirical distribution from the input file.
      69   * Computes the empirical distribution using data read from a URL.
      77   * Generates a random value from this distribution.
      [all …]
|
| /external/rust/crates/criterion/src/ |
| D | estimate.rs |
      3    use crate::stats::Distribution;
      50   let to_estimate = |point_estimate, distribution: &Distribution<f64>| {   [in build_estimates()]
      51   let (lb, ub) = distribution.confidence_interval(cl);   [in build_estimates()]
      60   standard_error: distribution.std_dev(None),   [in build_estimates()]
      78   let to_estimate = |point_estimate, distribution: &Distribution<f64>| {   [in build_change_estimates()]
      79   let (lb, ub) = distribution.confidence_interval(cl);   [in build_change_estimates()]
      88   standard_error: distribution.std_dev(None),   [in build_change_estimates()]
      130  pub mean: Distribution<f64>,
      131  pub median: Distribution<f64>,
      132  pub median_abs_dev: Distribution<f64>,
      [all …]
|
| /external/rust/crates/rand/src/distributions/ |
| D | mod.rs |
      12   //! This module is the home of the [`Distribution`] trait and several of its
      17   //! Abstractly, a [probability distribution] describes the probability of
      20   //! More concretely, an implementation of `Distribution<T>` for type `X` is an
      22   //! according to the distribution `X` represents, using an external source of
      25   //! A type `X` may implement `Distribution<T>` for multiple types `T`.
      26   //! Any type implementing [`Distribution`] is stateless (i.e. immutable),
      31   //! # The `Standard` distribution
      33   //! The [`Standard`] distribution is important to mention. This is the
      34   //! distribution used by [`Rng::gen`] and represents the "default" way to
      39   //! Implementing `Distribution<T>` for [`Standard`] for user types `T` makes it
      [all …]
|