# Copyright 2020-2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

import numpy as np
import pytest

import mindspore.context as context
from mindspore.common.tensor import Tensor
from mindspore.ops import operations as P


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast():
    """
    Test BroadcastTo with fully specified target shapes, comparing the output
    against numpy's broadcast_to for float32 and float16 inputs.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target='GPU')

    shape = (3, 4, 5, 6)
    x_np = np.random.rand(3, 1, 5, 1).astype(np.float32)
    output = P.BroadcastTo(shape)(Tensor(x_np))
    expect = np.broadcast_to(x_np, shape)
    assert np.allclose(output.asnumpy(), expect)

    x1_np = np.random.rand(3, 1, 5, 1).astype(np.float16)
    output = P.BroadcastTo(shape)(Tensor(x1_np))
    expect = np.broadcast_to(x1_np, shape)
    assert np.allclose(output.asnumpy(), expect)

    shape = (2, 3, 4, 5)
    x1_np = np.random.rand(4, 5).astype(np.float32)
    output = P.BroadcastTo(shape)(Tensor(x1_np))
    expect = np.broadcast_to(x1_np, shape)
    assert np.allclose(output.asnumpy(), expect)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_dyn_init():
    """
    Test running the op with -1 values in the init shape so a single instance
    supports inputs of varied shape.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target='GPU')

    ms_shape = (-1, -1, 5, 6)
    np_shape = (3, 4, 5, 6)
    x_np = np.random.rand(3, 1, 5, 1).astype(np.float32)
    output = P.BroadcastTo(ms_shape)(Tensor(x_np))
    expect = np.broadcast_to(x_np, np_shape)
    assert np.allclose(output.asnumpy(), expect)

    x1_np = np.random.rand(3, 1, 5, 1).astype(np.float16)
    output = P.BroadcastTo(ms_shape)(Tensor(x1_np))
    expect = np.broadcast_to(x1_np, np_shape)
    assert np.allclose(output.asnumpy(), expect)

    ms_shape = (2, 3, -1, -1)
    np_shape = (2, 3, 4, 5)
    x1_np = np.random.rand(4, 5).astype(np.float32)
    output = P.BroadcastTo(ms_shape)(Tensor(x1_np))
    expect = np.broadcast_to(x1_np, np_shape)
    assert np.allclose(output.asnumpy(), expect)


@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_broadcast_dyn_invalid_init():
    """
    Test running the op with -1 values placed at invalid positions in the init
    shape. Each case is expected to raise ValueError.
    """
    context.set_context(mode=context.GRAPH_MODE, device_target='GPU')
    ms_shape = (2, -1, 4, 5)
    x_np = np.random.rand(4, 5).astype(np.float32)
    with pytest.raises(ValueError):
        P.BroadcastTo(ms_shape)(Tensor(x_np))

    ms_shape = (-1, 1, -1, -1)
    x_np = np.random.rand(4, 5).astype(np.float32)
    with pytest.raises(ValueError):
        P.BroadcastTo(ms_shape)(Tensor(x_np))