# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""test cases for Cauchy distribution"""
import numpy as np
from scipy import stats
import mindspore.context as context
import mindspore.nn as nn
import mindspore.nn.probability.distribution as msd
from mindspore import Tensor
from mindspore import dtype

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

class Prob(nn.Cell):
    """
    Test class: probability of Cauchy distribution.
    """
    def __init__(self):
        super(Prob, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.prob(x_)

def test_pdf():
    """
    Test pdf.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_pdf = cauchy_benchmark.pdf([1.0, 2.0]).astype(np.float32)
    pdf = Prob()
    output = pdf(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 1e-6
    assert (np.abs(output.asnumpy() - expect_pdf) < tol).all()
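# For reference (added note, not part of the original suite): the scipy benchmark above
# evaluates the closed-form Cauchy density
#     f(x; loc, scale) = 1 / (pi * scale * (1 + ((x - loc) / scale) ** 2)),
# with loc=[3.0] broadcast against the (2, 1) scale. A minimal numpy sketch of that
# formula, assuming the same argument layout:
def _reference_cauchy_pdf(x, loc, scale):
    """Illustrative closed-form Cauchy pdf, used only as a cross-check sketch."""
    x, loc, scale = np.asarray(x), np.asarray(loc), np.asarray(scale)
    return 1.0 / (np.pi * scale * (1.0 + np.square((x - loc) / scale)))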

class LogProb(nn.Cell):
    """
    Test class: log probability of Cauchy distribution.
    """
    def __init__(self):
        super(LogProb, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.log_prob(x_)

def test_log_likelihood():
    """
    Test log_pdf.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_logpdf = cauchy_benchmark.logpdf([1.0, 2.0]).astype(np.float32)
    logprob = LogProb()
    output = logprob(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 1e-6
    assert (np.abs(output.asnumpy() - expect_logpdf) < tol).all()

class KL(nn.Cell):
    """
    Test class: kl_loss of Cauchy distribution.
    """
    def __init__(self):
        super(KL, self).__init__()
        self.c = msd.Cauchy(np.array([3.]), np.array([4.]), dtype=dtype.float32)

    def construct(self, mu, s):
        return self.c.kl_loss('Cauchy', mu, s)

def test_kl_loss():
    """
    Test kl_loss.
    """
    loc_b = np.array([0.]).astype(np.float32)
    scale_b = np.array([1.]).astype(np.float32)

    loc_a = np.array([3.0]).astype(np.float32)
    scale_a = np.array([4.0]).astype(np.float32)

    sum_square = np.square(scale_a + scale_b)
    square_diff = np.square(loc_a - loc_b)
    expect_kl_loss = np.log(sum_square + square_diff) - \
        np.log(4.0 * scale_a * scale_b)

    kl_loss = KL()
    loc = Tensor(loc_b, dtype=dtype.float32)
    scale = Tensor(scale_b, dtype=dtype.float32)
    output = kl_loss(loc, scale)
    tol = 1e-6
    assert (np.abs(output.asnumpy() - expect_kl_loss) < tol).all()
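# Note (added for reference): expect_kl_loss above is the closed-form KL divergence
# between two Cauchy distributions,
#     KL(a || b) = log((scale_a + scale_b)**2 + (loc_a - loc_b)**2) - log(4 * scale_a * scale_b),
# evaluated here with (loc_a, scale_a) = (3, 4) and (loc_b, scale_b) = (0, 1).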

class Basics(nn.Cell):
    """
    Test class: mode of Cauchy distribution.
    """
    def __init__(self):
        super(Basics, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([2.0, 4.0]), dtype=dtype.float32)

    def construct(self):
        return self.c.mode()

def test_basics():
    """
    Test mode.
    """
    basics = Basics()
    mode = basics()
    expect_mode = np.array([3.0, 3.0])
    tol = 1e-6
    assert (np.abs(mode.asnumpy() - expect_mode) < tol).all()
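# Note (added for reference): the mode of a Cauchy distribution equals its location
# parameter, so loc=[3.0] broadcast against the two scales yields [3.0, 3.0].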

class Sampling(nn.Cell):
    """
    Test class: sample of Cauchy distribution.
    """
    def __init__(self, shape, seed=0):
        super(Sampling, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), seed=seed, dtype=dtype.float32)
        self.shape = shape

    def construct(self, loc=None, scale=None):
        return self.c.sample(self.shape, loc, scale)

def test_sample():
    """
    Test sample.
    """
    shape = (2, 3)
    seed = 10
    loc = Tensor([2.0], dtype=dtype.float32)
    scale = Tensor([2.0, 2.0, 2.0], dtype=dtype.float32)
    sample = Sampling(shape, seed=seed)
    output = sample(loc, scale)
    assert output.shape == (2, 3, 3)
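# Note (added for reference): the sample-time loc (shape (1,)) and scale (shape (3,))
# broadcast to a batch shape of (3,), so the drawn samples have shape
# sample_shape + batch_shape = (2, 3) + (3,) = (2, 3, 3), matching the assertion above.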

class CDF(nn.Cell):
    """
    Test class: cdf of Cauchy distribution.
    """
    def __init__(self):
        super(CDF, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.cdf(x_)

def test_cdf():
    """
    Test cdf.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_cdf = cauchy_benchmark.cdf([1.0, 2.0]).astype(np.float32)
    cdf = CDF()
    output = cdf(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 2e-5
    assert (np.abs(output.asnumpy() - expect_cdf) < tol).all()
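# For reference (added note, not part of the original suite): the scipy benchmark above
# evaluates the closed-form Cauchy cdf
#     F(x; loc, scale) = arctan((x - loc) / scale) / pi + 0.5.
# A minimal numpy sketch of that formula, assuming the same argument layout:
def _reference_cauchy_cdf(x, loc, scale):
    """Illustrative closed-form Cauchy cdf, used only as a cross-check sketch."""
    x, loc, scale = np.asarray(x), np.asarray(loc), np.asarray(scale)
    return np.arctan((x - loc) / scale) / np.pi + 0.5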

class LogCDF(nn.Cell):
    """
    Test class: log_cdf of Cauchy distribution.
    """
    def __init__(self):
        super(LogCDF, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.log_cdf(x_)

def test_log_cdf():
    """
    Test log_cdf.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_logcdf = cauchy_benchmark.logcdf([1.0, 2.0]).astype(np.float32)
    logcdf = LogCDF()
    output = logcdf(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 5e-5
    assert (np.abs(output.asnumpy() - expect_logcdf) < tol).all()

class SF(nn.Cell):
    """
    Test class: survival function of Cauchy distribution.
    """
    def __init__(self):
        super(SF, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.survival_function(x_)

def test_survival():
    """
    Test survival function.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_survival = cauchy_benchmark.sf([1.0, 2.0]).astype(np.float32)
    survival_function = SF()
    output = survival_function(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 2e-5
    assert (np.abs(output.asnumpy() - expect_survival) < tol).all()
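# Note (added for reference): the survival function is 1 - cdf, i.e.
#     S(x; loc, scale) = 0.5 - arctan((x - loc) / scale) / pi,
# which is what scipy's sf benchmark evaluates above.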

class LogSF(nn.Cell):
    """
    Test class: log survival function of Cauchy distribution.
    """
    def __init__(self):
        super(LogSF, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, x_):
        return self.c.log_survival(x_)

def test_log_survival():
    """
    Test log_survival.
    """
    cauchy_benchmark = stats.cauchy(np.array([3.0]), np.array([[2.0], [4.0]]))
    expect_log_survival = cauchy_benchmark.logsf([1.0, 2.0]).astype(np.float32)
    log_survival = LogSF()
    output = log_survival(Tensor([1.0, 2.0], dtype=dtype.float32))
    tol = 2e-5
    assert (np.abs(output.asnumpy() - expect_log_survival) < tol).all()

class EntropyH(nn.Cell):
    """
    Test class: entropy of Cauchy distribution.
    """
    def __init__(self):
        super(EntropyH, self).__init__()
        self.c = msd.Cauchy(np.array([3.0]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self):
        return self.c.entropy()

def test_entropy():
    """
    Test entropy.
    """
    expect_entropy = np.log(4 * np.pi * np.array([[2.0], [4.0]]))
    entropy = EntropyH()
    output = entropy()
    tol = 1e-6
    assert (np.abs(output.asnumpy() - expect_entropy) < tol).all()
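# Note (added for reference): the differential entropy of a Cauchy distribution is
#     H = log(4 * pi * scale),
# independent of the location parameter, which is what expect_entropy encodes above.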

class CrossEntropy(nn.Cell):
    """
    Test class: cross entropy between Cauchy distributions.
    """
    def __init__(self):
        super(CrossEntropy, self).__init__()
        self.c = msd.Cauchy(np.array([3.]), np.array([[2.0], [4.0]]), dtype=dtype.float32)

    def construct(self, mu, s):
        entropy = self.c.entropy()
        kl_loss = self.c.kl_loss('Cauchy', mu, s)
        h_sum_kl = entropy + kl_loss
        cross_entropy = self.c.cross_entropy('Cauchy', mu, s)
        return h_sum_kl - cross_entropy

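# Note (added for reference): the test below relies on the identity
#     cross_entropy(a, b) = entropy(a) + KL(a || b),
# so the difference returned by CrossEntropy.construct should be close to zero.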
def test_cross_entropy():
    """
    Test cross_entropy.
    """
    cross_entropy = CrossEntropy()
    loc = Tensor([1.0], dtype=dtype.float32)
    scale = Tensor([1.0], dtype=dtype.float32)
    diff = cross_entropy(loc, scale)
    tol = 1e-6
    assert (np.abs(diff.asnumpy() - np.zeros(diff.shape)) < tol).all()