1# Copyright 2022 Huawei Technologies Co., Ltd 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================ 15import numpy as np 16import pytest 17 18import mindspore as ms 19from mindspore import Tensor, nn 20import mindspore.ops.function as F 21 22 23class NetSoftmin(nn.Cell): 24 def construct(self, x, axis=-1): 25 return F.softmin(x, axis) 26 27 28@pytest.mark.level2 29@pytest.mark.platform_x86_cpu 30@pytest.mark.platform_arm_cpu 31@pytest.mark.platform_x86_gpu_training 32@pytest.mark.platform_arm_ascend_training 33@pytest.mark.platform_x86_ascend_training 34@pytest.mark.env_onecard 35@pytest.mark.parametrize('mode', [ms.GRAPH_MODE, ms.PYNATIVE_MODE]) 36def test_net(mode): 37 """ 38 Feature: test softmin op 39 Description: verify the result of softmin 40 Expectation: assertion success 41 """ 42 ms.set_context(mode=mode) 43 x = Tensor(np.array([1, 2, 3, 4, 5]), ms.float32) 44 softmin = NetSoftmin() 45 output = softmin(x) 46 np_out = np.array([0.6364086, 0.23412165, 0.08612854, 0.03168492, 0.01165623]) 47 assert np.allclose(output.asnumpy(), np_out) 48