• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2019 Huawei Technologies Co., Ltd
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14# ============================================================================
15"""test cases for invert"""
16import numpy as np
17import mindspore.context as context
18import mindspore.nn as nn
19import mindspore.nn.probability.bijector as msb
20from mindspore import Tensor
21from mindspore import dtype
22
# All cases below run in graph mode and target Ascend hardware.
context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")
24
class Net(nn.Cell):
    """
    Test class: forward pass of bijector.

    Evaluates Invert(bijector).forward and bijector.inverse on the same
    input; the two results are expected to coincide.
    """
    def __init__(self):
        super(Net, self).__init__()
        self.origin = msb.ScalarAffine(scale=2.0, shift=1.0)
        self.invert = msb.Invert(self.origin)

    def construct(self, x_):
        inverted_forward = self.invert.forward(x_)
        origin_inverse = self.origin.inverse(x_)
        return inverted_forward, origin_inverse
36
def test_forward():
    """Check that Invert.forward matches the wrapped bijector's inverse."""
    net = Net()
    samples = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32)
    out_invert, out_origin = net(Tensor(samples, dtype=dtype.float32))
    tol = 1e-6
    diff = np.abs(out_invert.asnumpy() - out_origin.asnumpy())
    assert (diff < tol).all()
43
class Net1(nn.Cell):
    """
    Test class: backward pass of bijector.

    Evaluates Invert(bijector).inverse and bijector.forward on the same
    input; the two results are expected to coincide.
    """
    def __init__(self):
        super(Net1, self).__init__()
        self.origin = msb.ScalarAffine(scale=2.0, shift=1.0)
        self.invert = msb.Invert(self.origin)

    def construct(self, x_):
        inverted_inverse = self.invert.inverse(x_)
        origin_forward = self.origin.forward(x_)
        return inverted_inverse, origin_forward
55
def test_backward():
    """Check that Invert.inverse matches the wrapped bijector's forward."""
    net = Net1()
    samples = np.array([2.0, 3.0, 4.0, 5.0]).astype(np.float32)
    out_invert, out_origin = net(Tensor(samples, dtype=dtype.float32))
    tol = 1e-6
    diff = np.abs(out_invert.asnumpy() - out_origin.asnumpy())
    assert (diff < tol).all()
62
class Net2(nn.Cell):
    """
    Test class: Forward Jacobian.

    Evaluates Invert(bijector).forward_log_jacobian and
    bijector.inverse_log_jacobian on the same input; the two results
    are expected to coincide.
    """
    def __init__(self):
        super(Net2, self).__init__()
        self.origin = msb.ScalarAffine(scale=2.0, shift=1.0)
        self.invert = msb.Invert(self.origin)

    def construct(self, x_):
        inverted_jacobian = self.invert.forward_log_jacobian(x_)
        origin_jacobian = self.origin.inverse_log_jacobian(x_)
        return inverted_jacobian, origin_jacobian
75
def test_forward_jacobian():
    """Check Invert.forward_log_jacobian against inverse_log_jacobian."""
    net = Net2()
    samples = Tensor([2.0, 3.0, 4.0, 5.0], dtype=dtype.float32)
    out_invert, out_origin = net(samples)
    tol = 1e-6
    diff = np.abs(out_invert.asnumpy() - out_origin.asnumpy())
    assert (diff < tol).all()
82
class Net3(nn.Cell):
    """
    Test class: Backward Jacobian.

    Evaluates Invert(bijector).inverse_log_jacobian and
    bijector.forward_log_jacobian on the same input; the two results
    are expected to coincide.
    """
    def __init__(self):
        super(Net3, self).__init__()
        self.origin = msb.ScalarAffine(scale=2.0, shift=1.0)
        self.invert = msb.Invert(self.origin)

    def construct(self, x_):
        inverted_jacobian = self.invert.inverse_log_jacobian(x_)
        origin_jacobian = self.origin.forward_log_jacobian(x_)
        return inverted_jacobian, origin_jacobian
95
def test_backward_jacobian():
    """Check Invert.inverse_log_jacobian against forward_log_jacobian."""
    net = Net3()
    samples = Tensor([2.0, 3.0, 4.0, 5.0], dtype=dtype.float32)
    out_invert, out_origin = net(samples)
    tol = 1e-6
    diff = np.abs(out_invert.asnumpy() - out_origin.asnumpy())
    assert (diff < tol).all()
102