# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""test cases for gumbel_cdf"""
import numpy as np
import mindspore.context as context
import mindspore.nn as nn
import mindspore.nn.probability.bijector as msb
from mindspore import Tensor
from mindspore import dtype

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(nn.Cell):
    """
    Test class: forward pass of bijector.
    """
    def __init__(self, loc, scale):
        super().__init__()
        self.bijector = msb.GumbelCDF(loc, scale)

    def construct(self, x_):
        # Forward transform: x -> exp(-exp(-(x - loc) / scale))
        return self.bijector.forward(x_)


def test_forward():
    """Compare bijector.forward with the closed-form Gumbel CDF."""
    mean = np.array([0.0])
    spread = np.array([[1.0], [2.0]])
    net = Net(mean, spread)
    samples = np.array([-2., -1., 0., 1., 2.]).astype(np.float32)
    output = net(Tensor(samples, dtype=dtype.float32))
    # Gumbel CDF: F(x) = exp(-exp(-(x - loc) / scale))
    reference = np.exp(-np.exp(-(samples - mean) / spread))
    assert np.all(np.abs(output.asnumpy() - reference) < 1e-6)


class Net1(nn.Cell):
    """
    Test class: backward pass of bijector.
    """
    def __init__(self, loc, scale):
        super().__init__()
        self.bijector = msb.GumbelCDF(loc, scale)

    def construct(self, x_):
        # Inverse transform: y -> loc - scale * log(-log(y))
        return self.bijector.inverse(x_)


def test_backward():
    """Compare bijector.inverse with the closed-form Gumbel quantile."""
    mean = np.array([0.0])
    spread = np.array([[1.0], [2.0]])
    net = Net1(mean, spread)
    probs = np.array([0.1, 0.25, 0.5, 0.75, 0.9]).astype(np.float32)
    output = net(Tensor(probs, dtype=dtype.float32))
    # Gumbel quantile: F^{-1}(y) = loc - scale * log(-log(y))
    reference = mean - spread * np.log(-np.log(probs))
    assert np.all(np.abs(output.asnumpy() - reference) < 1e-6)


class Net2(nn.Cell):
    """
    Test class: Forward Jacobian.
    """
    def __init__(self, loc, scale):
        super().__init__()
        self.bijector = msb.GumbelCDF(loc, scale)

    def construct(self, x_):
        # log|dF/dx| of the forward transform
        return self.bijector.forward_log_jacobian(x_)


def test_forward_jacobian():
    """Compare forward_log_jacobian with its analytic expression."""
    mean = np.array([0.0])
    spread = np.array([[1.0], [2.0]])
    net = Net2(mean, spread)
    samples = np.array([-2., -1., 0., 1., 2.]).astype(np.float32)
    output = net(Tensor(samples))
    # With z = (x - loc)/scale:  log|dF/dx| = -z - exp(-z) - log(scale)
    z = (samples - mean) / spread
    reference = -z - np.exp(-z) - np.log(spread)
    assert np.all(np.abs(output.asnumpy() - reference) < 1e-6)


class Net3(nn.Cell):
    """
    Test class: Backward Jacobian.
    """
    def __init__(self, loc, scale):
        super().__init__()
        self.bijector = msb.GumbelCDF(loc, scale)

    def construct(self, x_):
        # log|dF^{-1}/dy| of the inverse transform
        return self.bijector.inverse_log_jacobian(x_)


def test_backward_jacobian():
    """Compare inverse_log_jacobian with its analytic expression."""
    mean = np.array([0.0])
    spread = np.array([[1.0], [2.0]])
    net = Net3(mean, spread)
    probs = np.array([0.1, 0.2, 0.5, 0.75, 0.9]).astype(np.float32)
    output = net(Tensor(probs))
    # log|dF^{-1}/dy| = log(scale / (-y * log(y)))
    reference = np.log(spread / (-probs * np.log(probs)))
    assert np.all(np.abs(output.asnumpy() - reference) < 1e-6)