Searched refs:kl_loss (Results 1 – 25 of 29) sorted by relevance
/third_party/mindspore/mindspore/nn/probability/bnn_layers/

bnn_cell_wrapper.py
    82: self.kl_loss = []
    88: kl_loss = 0
    89: for i in range(len(self.kl_loss)):
    90: kl_loss += self.kl_loss[i]()
    91: loss = backbone_loss * self.dnn_factor + kl_loss * self.bnn_factor
    98: self.kl_loss.append(layer.compute_kl_loss)
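The bnn_cell_wrapper.py hits show how the wrapper folds the Bayesian layers' KL terms into the training loss: each layer's compute_kl_loss callable is collected (line 98), the callables are evaluated and summed at loss time (lines 88-90), and the sum is weighted against the backbone loss (line 91). A minimal plain-Python sketch of that pattern follows; the class and method names here are illustrative, not the repository's.

    class BnnLossSketch:
        """Combine a task loss with the KL terms of Bayesian layers.

        Sketch only: assumes each Bayesian layer exposes a compute_kl_loss()
        method returning a scalar, as the snippet above suggests.
        """

        def __init__(self, bayesian_layers, dnn_factor=1.0, bnn_factor=1.0):
            self.dnn_factor = dnn_factor
            self.bnn_factor = bnn_factor
            # mirror of line 98: keep one KL callable per Bayesian layer
            self.kl_loss = [layer.compute_kl_loss for layer in bayesian_layers]

        def total_loss(self, backbone_loss):
            # mirror of lines 88-91: sum the per-layer KL terms, then weight
            kl_loss = 0
            for compute_kl in self.kl_loss:
                kl_loss += compute_kl()
            return backbone_loss * self.dnn_factor + kl_loss * self.bnn_factor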
conv_variational.py
    126: kl_loss = self.sum(kl)
    133: kl_loss += kl
    134: return kl_loss

dense_variational.py
    103: kl_loss = self.sum(kl)
    110: kl_loss += kl
    111: return kl_loss
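conv_variational.py and dense_variational.py both accumulate a per-parameter KL and return the sum. Assuming, as is standard for variational layers of this kind, that the divergence is taken between a Gaussian weight posterior and a Gaussian prior, the quantity being summed is the analytic KL sketched below; the function name and the NumPy implementation are illustrative, not the layer code itself.

    import numpy as np

    def gaussian_kl(mu_q, sigma_q, mu_p, sigma_p):
        # Elementwise KL(N(mu_q, sigma_q^2) || N(mu_p, sigma_p^2)),
        # summed over all weights -- the kind of quantity the variational
        # layers accumulate into kl_loss.
        kl = (np.log(sigma_p / sigma_q)
              + (sigma_q ** 2 + (mu_q - mu_p) ** 2) / (2.0 * sigma_p ** 2)
              - 0.5)
        return kl.sum()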
/third_party/mindspore/tests/st/probability/distribution/

test_get_dist_args.py
    43: kl_loss = self.normal2.kl_loss(dist_type, *args_list2)
    47: kl_loss1 = self.normal2.kl_loss(dist_type, *args_list3)
    48: return prob, prob1, kl_loss, kl_loss1
    83: kl_loss = self.expon2.kl_loss(dist_type, *args_list2)
    87: kl_loss1 = self.expon2.kl_loss(dist_type, *args_list3)
    88: return prob, prob1, kl_loss, kl_loss1

test_beta.py
    80: return self.b.kl_loss('Beta', x_, y_)
    101: kl_loss = KL()
    104: output = kl_loss(concentration1, concentration0)
    190: kl_loss = self.b.kl_loss('Beta', x_, y_)
    191: h_sum_kl = entropy + kl_loss
    217: kl = self.beta.kl_loss('Beta', x_, y_)

test_categorical.py
    90: return self.c.kl_loss('Categorical', x_)
    97: kl_loss = KL()
    98: output = kl_loss(Tensor([0.7, 0.3], dtype=dtype.float32))
    294: kl_loss = self.c.kl_loss('Categorical', x_)
    295: h_sum_kl = entropy + kl_loss

test_bernoulli.py
    92: return self.b.kl_loss('Bernoulli', x_)
    105: kl_loss = KL()
    106: output = kl_loss(Tensor([probs1_b], dtype=dtype.float32))
    310: kl_loss = self.b.kl_loss('Bernoulli', x_)
    311: h_sum_kl = entropy + kl_loss

test_geometric.py
    91: return self.g.kl_loss('Geometric', x_)
    104: kl_loss = KL()
    105: output = kl_loss(Tensor([probs1_b], dtype=dtype.float32))
    307: kl_loss = self.g.kl_loss('Geometric', x_)
    308: h_sum_kl = entropy + kl_loss

test_gamma.py
    81: return self.g.kl_loss('Gamma', x_, y_)
    99: kl_loss = KL()
    102: output = kl_loss(concentration, rate)
    277: kl_loss = self.g.kl_loss('Gamma', x_, y_)
    278: h_sum_kl = entropy + kl_loss
    304: kl = self.g.kl_loss('Gamma', x_, y_)

test_normal.py
    80: return self.n.kl_loss('Normal', x_, y_)
    97: kl_loss = KL()
    100: output = kl_loss(mean, sd)
    273: kl_loss = self.n.kl_loss('Normal', x_, y_)
    274: h_sum_kl = entropy + kl_loss
    300: kl = self.normal.kl_loss('Normal', x_, y_)
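The st tests above all use the same call shape: kl_loss takes the name of the other distribution followed by that distribution's parameters, with the calling distribution's own parameters fixed at construction. A minimal sketch of the Normal case, modelled on the test_normal.py hits (lines 80, 97, 100); the concrete parameter values are placeholders, not taken from the tests.

    import mindspore.nn as nn
    import mindspore.nn.probability.distribution as msd
    from mindspore import Tensor, dtype

    class KL(nn.Cell):
        def __init__(self):
            super(KL, self).__init__()
            # "self" distribution: its own parameters are fixed at construction
            self.n = msd.Normal(3.0, 4.0, dtype=dtype.float32)

        def construct(self, mean_b, sd_b):
            # KL divergence from self.n to Normal(mean_b, sd_b)
            return self.n.kl_loss('Normal', mean_b, sd_b)

    kl_loss = KL()
    output = kl_loss(Tensor([1.0], dtype=dtype.float32),
                     Tensor([1.0], dtype=dtype.float32))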
test_cauchy.py
    79: return self.c.kl_loss('Cauchy', mu, s)
    96: kl_loss = KL()
    99: output = kl_loss(loc, scale)
    268: kl_loss = self.c.kl_loss('Cauchy', mu, s)
    269: h_sum_kl = entropy + kl_loss

test_lognormal.py
    79: return self.ln.kl_loss('LogNormal', x_, y_)
    95: kl_loss = KL()
    98: output = kl_loss(mean, sd)
    272: kl_loss = self.ln.kl_loss('LogNormal', x_, y_)
    273: h_sum_kl = entropy + kl_loss
    299: kl = self.LogNormal.kl_loss('LogNormal', x_, y_)

test_gumbel.py
    85: return self.gum.kl_loss('Gumbel', loc_b, scale_b)
    101: kl_loss = KL()
    104: output = kl_loss(loc_b, scale_b)
    289: kl_loss = self.gum.kl_loss('Gumbel', x_, y_)
    290: h_sum_kl = entropy + kl_loss

test_uniform.py
    81: return self.u.kl_loss('Uniform', x_, y_)
    232: kl_loss = self.u.kl_loss('Uniform', x_, y_)
    233: h_sum_kl = entropy + kl_loss

test_exponential.py
    81: return self.e.kl_loss('Exponential', x_)
    267: kl_loss = self.e.kl_loss('Exponential', x_)
    268: h_sum_kl = entropy + kl_loss
/third_party/mindspore/mindspore/nn/probability/infer/variational/

elbo.py
    68: kl_loss = self.posterior('kl_loss', 'Normal', self.zeros(mu), self.zeros(mu)+1, mu, std)
    69: elbo = reconstruct_loss + self.sum(kl_loss)
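In elbo.py the posterior's kl_loss is evaluated against a standard-normal prior: the zeros and zeros+1 tensors are the prior's mean and standard deviation, and mu, std are the encoder outputs. Assuming that reading of the argument order, the per-dimension term being summed into the ELBO is the closed-form KL below; NumPy is used purely for illustration.

    import numpy as np

    def kl_to_standard_normal(mu, std):
        # KL(N(mu, std^2) || N(0, 1)) per latent dimension; elbo.py sums
        # this over dimensions and adds it to the reconstruction loss.
        return 0.5 * (mu ** 2 + std ** 2 - 1.0) - np.log(std)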
/third_party/mindspore/tests/st/probability/dpn/

test_gpu_vae_gan.py
    124: kl_loss = self.posterior('kl_loss', 'Normal', self.zeros(mu), self.zeros(mu) + 1, mu, std)
    125: elbo_loss = reconstruct_loss + self.sum(kl_loss)
/third_party/mindspore/tests/ut/python/nn/probability/distribution/

test_exponential.py
    119: kl1 = self.e1.kl_loss('Exponential', rate_b)
    120: kl2 = self.e2.kl_loss('Exponential', rate_b, rate_a)

test_geometric.py
    135: kl1 = self.g1.kl_loss('Geometric', probs_b)
    136: kl2 = self.g2.kl_loss('Geometric', probs_b, probs_a)

test_beta.py
    130: kl1 = self.g1.kl_loss('Gamma', concentration1_b, concentration0_b)
    131: kl2 = self.g2.kl_loss('Gamma', concentration1_b, concentration0_b, concentration1_a, concentration…

test_cauchy.py
    125: kl = self.cauchy.kl_loss('Cauchy', mu, s)
    126: kl1 = self.cauchy1.kl_loss('Cauchy', mu, s, mu_a, s_a)

test_bernoulli.py
    135: kl1 = self.b1.kl_loss('Bernoulli', probs_b)
    136: kl2 = self.b2.kl_loss('Bernoulli', probs_b, probs_a)

test_uniform.py
    142: kl1 = self.u1.kl_loss('Uniform', low_b, high_b)
    143: kl2 = self.u2.kl_loss('Uniform', low_b, high_b, low_a, high_a)

test_lognormal.py
    126: kl1 = self.n1.kl_loss('LogNormal', mean_b, sd_b)
    127: kl2 = self.n2.kl_loss('LogNormal', mean_b, sd_b, mean_a, sd_a)

test_categorical.py
    159: kl1 = self.c1.kl_loss('Categorical', probs_b)
    160: kl2 = self.c2.kl_loss('Categorical', probs_b, probs_a)
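The unit tests above exercise kl_loss twice per distribution: once with only the other distribution's parameters (the calling distribution was constructed with its own parameters) and once with both parameter sets appended (the calling distribution was constructed without parameters, so its own are supplied at call time). A hedged sketch of the two variants for Bernoulli, mirroring the kl1/kl2 pattern in test_bernoulli.py; the probability values are placeholders.

    import mindspore.nn as nn
    import mindspore.nn.probability.distribution as msd
    from mindspore import Tensor, dtype

    class KL(nn.Cell):
        def __init__(self):
            super(KL, self).__init__()
            # own probability fixed at construction time
            self.b1 = msd.Bernoulli(0.7, dtype=dtype.int32)
            # no parameters at construction; own probability supplied per call
            self.b2 = msd.Bernoulli(dtype=dtype.int32)

        def construct(self, probs_b, probs_a):
            kl1 = self.b1.kl_loss('Bernoulli', probs_b)           # other's probs only
            kl2 = self.b2.kl_loss('Bernoulli', probs_b, probs_a)  # other's probs, then own
            return kl1, kl2

    kl = KL()
    kl1, kl2 = kl(Tensor([0.3], dtype=dtype.float32),
                  Tensor([0.7], dtype=dtype.float32))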