# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.ops.operations import _inner_ops as inner


class Net(nn.Cell):
    """Applies ExpandDims on the last axis of a static-shape input."""
    def __init__(self):
        super(Net, self).__init__()
        self.expand_dims = P.ExpandDims()

    def construct(self, tensor):
        return self.expand_dims(tensor, -1)


class NetDynamic(nn.Cell):
    """Routes the input through GpuConvertToDynamicShape so ExpandDims runs on a dynamic-shape tensor."""
    def __init__(self):
        super(NetDynamic, self).__init__()
        self.conv = inner.GpuConvertToDynamicShape()
        self.expand_dims = P.ExpandDims()

    def construct(self, x):
        x_conv = self.conv(x)
        return self.expand_dims(x_conv, -1)


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_bool():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.bool_)
    net = NetDynamic()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_int8():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.int8)
    net = NetDynamic()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_uint8():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.uint8)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_int16():
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.int16)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_int32():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.int32)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_int64():
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.int64)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level1
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_float16():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.float16)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_float32():
    context.set_context(mode=context.PYNATIVE_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.float32)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_net_float64():
    context.set_context(mode=context.GRAPH_MODE, device_target="GPU")
    x = np.random.randn(1, 16, 1, 1).astype(np.float64)
    net = Net()
    output = net(Tensor(x))
    assert np.all(output.asnumpy() == np.expand_dims(x, -1))