1# Copyright 2020 Huawei Technologies Co., Ltd 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================ 15"""Utility functions to help distribution class.""" 16import numpy as np 17from mindspore.ops import operations as P 18from mindspore.ops import functional as F 19from mindspore.ops.operations import _inner_ops as inner 20from mindspore.ops.primitive import constexpr 21from mindspore.common import dtype as mstype 22from .utils import CheckTensor 23 24 25@constexpr(check=False) 26def _check_tensor(x, name): 27 CheckTensor()(x, name) 28 return x 29 30 31def exp_generic(input_x): 32 """ 33 Exp op on Ascend doesn't support int types. 34 Fix this with casting the type. 35 """ 36 exp = P.Exp() 37 cast = P.Cast() 38 dtype = P.DType() 39 checktype = inner.IsSubClass() 40 41 if not checktype(dtype(input_x), mstype.float_): 42 input_x = cast(input_x, mstype.float32) 43 return exp(input_x) 44 45 46def log_generic(input_x): 47 """ 48 Log op on Ascend is calculated as log(abs(x)). 49 Fix this with putting negative values as nan. 50 And log op on Ascend doesn't support int types. 51 Fix this with casting the type. 52 """ 53 log = P.Log() 54 less = P.Less() 55 lessequal = P.LessEqual() 56 cast = P.Cast() 57 dtype = P.DType() 58 shape = P.Shape() 59 select = P.Select() 60 checktype = inner.IsSubClass() 61 62 if not checktype(dtype(input_x), mstype.float_): 63 input_x = cast(input_x, mstype.float32) 64 nan = F.fill(dtype(input_x), shape(input_x), np.nan) 65 neginf = F.fill(dtype(input_x), shape(input_x), -np.inf) 66 neg_x = less(input_x, 0.0) 67 nonpos_x = lessequal(input_x, 0.0) 68 log_x = log(input_x) 69 result = select( 70 nonpos_x, neginf, log_x) 71 return select(neg_x, nan, result) 72 73 74def log_generic_with_check(x): 75 """ 76 log generic with input check 77 """ 78 _check_tensor(x, "the input of log_generic") 79 return log_generic(x) 80 81 82def log1p_generic(x): 83 """ 84 Log1p ops on GPU device or when device_target == GPU. 85 """ 86 return log_generic(x + 1.0) 87 88 89def broadcast_to(x, target): 90 """ 91 Broadcast x to the shape of target. 92 """ 93 shape = P.Shape() 94 if shape(x) == shape(target): 95 return x 96 return P.BroadcastTo(shape(target))(x) 97