# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for the Slice and StridedSlice operators on the CPU backend.

Each SliceN Cell wraps either the explicit ``P.Slice`` primitive or Python
slice-expression syntax (which lowers to strided-slice ops in graph mode);
the matching test compares the operator output against NumPy slicing.
"""

import numpy as np
import pytest

import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.common import dtype as mstype
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target='CPU')


class Slice(nn.Cell):
    """Applies ``P.Slice`` with fixed begin=(0, 1, 0) and size=(2, 1, 3)."""

    def __init__(self):
        super(Slice, self).__init__()
        self.slice = P.Slice()

    def construct(self, x):
        return self.slice(x, (0, 1, 0), (2, 1, 3))


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice():
    """P.Slice on a 3x2x3 float32 tensor selects the second row of the first two blocks."""
    x = Tensor(
        np.array([[[1, -1, 1], [2, -2, 2]], [[3, -3, 3], [4, -4, 4]], [[5, -5, 5], [6, -6, 6]]]), mstype.float32)
    expect = [[[2., -2., 2.]],
              [[4., -4., 4.]]]

    slice_op = Slice()
    output = slice_op(x)
    assert (output.asnumpy() == expect).all()


class Slice2(nn.Cell):
    """Applies ``P.Slice`` with fixed begin=(1, 0, 0) and size=(1, 2, 3)."""

    def __init__(self):
        super(Slice2, self).__init__()
        self.slice = P.Slice()

    def construct(self, x):
        return self.slice(x, (1, 0, 0), (1, 2, 3))


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice2():
    """P.Slice extracts the whole middle 2x3 block of an arange(18) tensor."""
    x = Tensor(np.arange(3 * 2 * 3).reshape(3, 2, 3), mstype.float32)
    expect = [[[6., 7., 8.],
               [9., 10., 11.]]]

    slice_op = Slice2()
    output = slice_op(x)
    assert (output.asnumpy() == expect).all()


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice_float64():
    """P.Slice supports float64 input; take one 1x1x3 block from the middle."""
    data = Tensor(np.array([[[1, 1, 1], [2, 2, 2]],
                            [[3, 3, 3], [4, 4, 4]],
                            [[5, 5, 5], [6, 6, 6]]]).astype(np.float64))
    slice_op = P.Slice()
    output = slice_op(data, (1, 0, 0), (1, 1, 3))
    expect = [[[3.0, 3.0, 3.0]]]
    assert (output.asnumpy() == expect).all()


class Slice3(nn.Cell):
    """Exercises ellipsis, negative-index, and negative-stride slice syntax."""

    def construct(self, x):
        return (x[..., -1], x[..., 2:1:-1], x[1:3:1, 0, ...], x[-1, 0, ...])


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice3():
    """Slice expressions with ``...`` and negative indices match NumPy on a random 4^4 tensor."""
    inputx = np.random.rand(4, 4, 4, 4).astype(np.float32)
    x = Tensor(inputx)
    slice_op = Slice3()
    output = slice_op(x)
    assert (output[0].asnumpy() == inputx[..., -1]).all()
    assert (output[1].asnumpy() == inputx[..., 2:1:-1]).all()
    assert (output[2].asnumpy() == inputx[1:3:1, 0, ...]).all()
    assert (output[3].asnumpy() == inputx[-1, 0, ...]).all()


class Slice4(nn.Cell):
    """Exercises an out-of-range stop (``:10``) clamped to the axis length."""

    def construct(self, x):
        return x[:10:1, :, 2:3:1]


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice4():
    """A stop index past the end behaves like NumPy (clamped, no error)."""
    inputx = np.random.rand(4, 4, 4).astype(np.float32)
    x = Tensor(inputx)
    slice_op = Slice4()
    output = slice_op(x)
    assert (output.asnumpy() == inputx[:10:1, :, 2:3:1]).all()


class Slice5(nn.Cell):
    """Applies ``P.Slice`` with begin/size captured as Cell attributes."""

    def __init__(self, begin, size):
        super(Slice5, self).__init__()
        self.slice = P.Slice()
        self.begin = begin
        self.size = size

    def construct(self, x):
        return self.slice(x, self.begin, self.size)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice5():
    """P.Slice with attribute-held begin/size matches the equivalent NumPy slice."""
    inputx = np.arange(3 * 5 * 4).reshape(3, 5, 4).astype(np.float32)
    x = Tensor(inputx)
    begin = (0, 1, 0)
    size = (3, 4, 4)
    slice_op = Slice5(begin, size)
    output = slice_op(x)
    assert (output.asnumpy() == inputx[0:3:1, 1:5:1, 0:4:1]).all()


class Slice6(nn.Cell):
    """Exercises negative start indices that exceed the axis length."""

    def construct(self, x):
        return (x[-10:], x[-5:10:2, :, :], x[-10:10:1, :, -10:10:1])


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_slice6():
    """Out-of-range negative starts are clamped to 0, matching NumPy semantics."""
    inputx = np.random.rand(4, 4, 4).astype(np.float32)
    x = Tensor(inputx)
    slice_op = Slice6()
    output = slice_op(x)
    assert (output[0].asnumpy() == inputx[-10:]).all()
    assert (output[1].asnumpy() == inputx[-5:10:2, :, :]).all()
    assert (output[2].asnumpy() == inputx[-10:10:1, :, -10:10:1]).all()


class StridedSlice(nn.Cell):
    """Applies ``P.StridedSlice`` with begin/end/stride captured at construction."""

    def __init__(self, begin, end, stride):
        super(StridedSlice, self).__init__()
        self.begin = begin
        self.end = end
        self.stride = stride
        self.stride_slice = P.StridedSlice()

    def construct(self, x):
        return self.stride_slice(x, self.begin, self.end, self.stride)


@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_strided_slice_bool_type():
    """StridedSlice supports bool tensors; extract row (1, 0, :)."""
    input_x = Tensor([[[False, False, True], [False, True, False]], [[False, True, False], [True, False, False]],
                      [[False, True, True], [True, False, True]]], mstype.bool_)
    begin = (1, 0, 0)
    end = (2, 1, 3)
    stride = (1, 1, 1)
    slice_op = StridedSlice(begin, end, stride)
    output = slice_op(input_x)
    expected_output = np.array([False, True, False])
    assert (output.asnumpy() == expected_output).all()


if __name__ == '__main__':
    test_slice()
    test_slice2()
    test_slice_float64()
    test_slice3()
    test_slice4()
    test_slice5()
    test_slice6()
    test_strided_slice_bool_type()