# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np

import mindspore.context as context
import mindspore.nn as nn
import mindspore.ops.composite as C
from mindspore import Tensor
from mindspore.ops import operations as P

context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")


class Net(nn.Cell):
    """Minimal cell wrapping P.AddN; construct returns the sum of its two inputs."""

    def __init__(self):
        super(Net, self).__init__()
        self.add = P.AddN()

    def construct(self, x, y):
        # AddN expects its operands packed into a tuple (or list) of tensors.
        return self.add((x, y))


def test_net():
    """AddN forward pass with tensor inputs, then with Python scalar inputs."""
    x = np.random.randn(1, 3, 3, 4).astype(np.float32)
    y = np.random.randn(1, 3, 3, 4).astype(np.float32)
    add = Net()
    output = add(Tensor(x), Tensor(y))
    print(x)
    print(y)
    print(output.asnumpy())

    # The same cell also accepts Python scalars in graph mode; the sum is
    # compared directly with the expected float.
    x = 1.0
    y = 2.0
    expect = 3.0
    add = Net()
    output = add(x, y)
    assert output == expect


def test_grad_addn_with_list():
    """AddN backward pass compiles and runs when the operands are passed as a list."""
    grad_op = C.GradOperation(get_all=True)

    class AddN(nn.Cell):
        def __init__(self):
            super().__init__()
            self.add_n = P.AddN()

        def construct(self, a, b):
            return self.add_n([a, b])

    inp = Tensor(np.ones([128, 96]).astype(np.float32))
    grad_op(AddN())(inp, inp)