1# Copyright 2020-2021 Huawei Technologies Co., Ltd 2# 3# Licensed under the Apache License, Version 2.0 (the "License"); 4# you may not use this file except in compliance with the License. 5# You may obtain a copy of the License at 6# 7# http://www.apache.org/licenses/LICENSE-2.0 8# 9# Unless required by applicable law or agreed to in writing, software 10# distributed under the License is distributed on an "AS IS" BASIS, 11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12# See the License for the specific language governing permissions and 13# limitations under the License. 14# ============================================================================ 15"""debug_ops""" 16from types import FunctionType, MethodType 17 18from mindspore import context 19from mindspore._c_expression import security 20from ..._checkparam import Validator as validator 21from ..._checkparam import Rel 22from ...common import dtype as mstype 23from ..primitive import prim_attr_register, Primitive, PrimitiveWithInfer 24 25 26def _check_mode(class_name): 27 """Check for PyNative mode.""" 28 mode = context.get_context('mode') 29 if mode == context.PYNATIVE_MODE: 30 raise RuntimeError(f"For '{class_name}', the operator does not support PyNative mode.") 31 32 33def _check_summary_param(name, value, class_name): 34 """Checks the name and value is valid for summary.""" 35 _check_mode(class_name) 36 n_type = name['dtype'] 37 n_value = name['value'] 38 validator.check_value_type('name', n_type, [type(mstype.string)], class_name) 39 if not n_value: 40 raise ValueError(f"For '{class_name}', the name should be valid string, but got '{n_value}'.") 41 42 v_type = value['dtype'] 43 validator.check_value_type('value', v_type, [type(mstype.tensor)], class_name) 44 45 46# Note: The return value of the summary operator is not used, 47# so there's nothing special about the return `dtype` or `shape`, any value is ok. 48# The `value` should be set to None, else summary operators may be optimized at compile graph phase, 49# it cause summary operators can not record data in constant folding scene. 50SUMMARY_RETURN_VALUE = {'dtype': mstype.int32, 'shape': [1], 'value': None} 51 52 53class ScalarSummary(Primitive): 54 """ 55 Outputs a scalar to a protocol buffer through a scalar summary operator. 56 57 Inputs: 58 - **name** (str) - The name of the input variable, it must not be an empty string. 59 - **value** (Tensor) - The value of scalar, and the shape of value must be [] or [1]. 60 61 Raises: 62 TypeError: If `name` is not a str. 63 TypeError: If `value` is not a Tensor. 64 65 Supported Platforms: 66 ``Ascend`` ``GPU`` ``CPU`` 67 68 Examples: 69 >>> import mindspore.nn as nn 70 >>> import mindspore.ops as ops 71 >>> 72 >>> 73 >>> class SummaryDemo(nn.Cell): 74 ... def __init__(self,): 75 ... super(SummaryDemo, self).__init__() 76 ... self.summary = ops.ScalarSummary() 77 ... self.add = ops.Add() 78 ... 79 ... def construct(self, x, y): 80 ... name = "x" 81 ... self.summary(name, x) 82 ... x = self.add(x, y) 83 ... return x 84 ... 85 """ 86 87 @prim_attr_register 88 def __init__(self): 89 """Initialize ScalarSummary.""" 90 91 if security.enable_security(): 92 raise ValueError('The Summary is not supported, please without `-s on` and recompile source.') 93 94 self.add_prim_attr("side_effect_io", True) 95 96 97class ImageSummary(PrimitiveWithInfer): 98 """ 99 Outputs the image tensor to protocol buffer through image summary operator. 100 101 Inputs: 102 - **name** (str) - The name of the input variable, it must not be an empty string. 103 - **value** (Tensor) - The value of image, the rank of tensor must be 4. 104 105 Raises: 106 TypeError: If `name` is not a str. 107 TypeError: If `value` is not a Tensor. 108 109 Supported Platforms: 110 ``Ascend`` ``GPU`` ``CPU`` 111 112 Examples: 113 114 >>> import mindspore.nn as nn 115 >>> import mindspore.ops as ops 116 >>> 117 >>> 118 >>> class Net(nn.Cell): 119 ... def __init__(self): 120 ... super(Net, self).__init__() 121 ... self.summary = ops.ImageSummary() 122 ... 123 ... def construct(self, x): 124 ... name = "image" 125 ... out = self.summary(name, x) 126 ... return out 127 ... 128 """ 129 130 @prim_attr_register 131 def __init__(self): 132 """Initialize ImageSummary.""" 133 134 if security.enable_security(): 135 raise ValueError('The Summary is not supported, please without `-s on` and recompile source.') 136 137 self.add_prim_attr("side_effect_io", True) 138 139 def __infer__(self, name, value): 140 _check_summary_param(name, value, self.__class__.__name__) 141 142 # The shape dim of image should be 4. 143 v_shape = value['shape'] 144 image_dim = 4 145 if len(v_shape) != image_dim: 146 raise ValueError(f"For '{self.name}', the dimension of 'value' should be {image_dim}," 147 f" but got {len(v_shape)}.") 148 149 return SUMMARY_RETURN_VALUE 150 151 152class TensorSummary(Primitive): 153 """ 154 Outputs a tensor to a protocol buffer through a tensor summary operator. 155 156 Inputs: 157 - **name** (str) - The name of the input variable. 158 - **value** (Tensor) - The value of tensor, and the rank of tensor must be greater than 0. 159 160 Raises: 161 TypeError: If `name` is not a str. 162 TypeError: If `value` is not a Tensor. 163 164 Supported Platforms: 165 ``Ascend`` ``GPU`` ``CPU`` 166 167 Examples: 168 >>> import mindspore.nn as nn 169 >>> import mindspore.ops as ops 170 >>> 171 >>> 172 >>> class SummaryDemo(nn.Cell): 173 ... def __init__(self,): 174 ... super(SummaryDemo, self).__init__() 175 ... self.summary = ops.TensorSummary() 176 ... self.add = ops.Add() 177 ... 178 ... def construct(self, x, y): 179 ... x = self.add(x, y) 180 ... name = "x" 181 ... self.summary(name, x) 182 ... return x 183 ... 184 """ 185 186 @prim_attr_register 187 def __init__(self): 188 """Initialize TensorSummary.""" 189 190 if security.enable_security(): 191 raise ValueError('The Summary is not supported, please without `-s on` and recompile source.') 192 193 self.add_prim_attr("side_effect_io", True) 194 195 196class HistogramSummary(PrimitiveWithInfer): 197 """ 198 Outputs the tensor to protocol buffer through histogram summary operator. 199 200 Inputs: 201 - **name** (str) - The name of the input variable. 202 - **value** (Tensor) - The value of tensor, and the rank of tensor must be greater than 0. 203 204 Raises: 205 TypeError: If `name` is not a str. 206 TypeError: If `value` is not a Tensor. 207 208 Supported Platforms: 209 ``Ascend`` ``GPU`` ``CPU`` 210 211 Examples: 212 >>> import mindspore.nn as nn 213 >>> import mindspore.ops as ops 214 >>> 215 >>> 216 >>> class SummaryDemo(nn.Cell): 217 ... def __init__(self,): 218 ... super(SummaryDemo, self).__init__() 219 ... self.summary = ops.HistogramSummary() 220 ... self.add = ops.Add() 221 ... 222 ... def construct(self, x, y): 223 ... x = self.add(x, y) 224 ... name = "x" 225 ... self.summary(name, x) 226 ... return x 227 ... 228 """ 229 230 @prim_attr_register 231 def __init__(self): 232 """Initialize HistogramSummary.""" 233 234 if security.enable_security(): 235 raise ValueError('The Summary is not supported, please without `-s on` and recompile source.') 236 237 self.add_prim_attr("side_effect_io", True) 238 239 def __infer__(self, name, value): 240 _check_summary_param(name, value, self.__class__.__name__) 241 242 v_shape = value['shape'] 243 # In the summary, the histogram value should be a tensor whose shape is not []. 244 if not v_shape: 245 raise ValueError(f"For '{self.name}', the type of 'value' should be tensor, " 246 f"its shape should not be [], but got {v_shape}.") 247 248 return SUMMARY_RETURN_VALUE 249 250 251class InsertGradientOf(PrimitiveWithInfer): 252 """ 253 Attaches callback to the graph node that will be invoked on the node's gradient. 254 255 Args: 256 f (Function): MindSpore's Function. Callback function. 257 258 Inputs: 259 - **input_x** (Any) - The graph node to attach to. 260 261 Outputs: 262 Tensor, returns `input_x` directly. `InsertGradientOf` does not affect the forward result. 263 264 Raises: 265 TypeError: If `f` is not a function of mindspore. 266 267 Supported Platforms: 268 ``Ascend`` ``GPU`` ``CPU`` 269 270 Examples: 271 >>> def clip_gradient(dx): 272 ... ret = dx 273 ... if ret > 1.0: 274 ... ret = 1.0 275 ... 276 ... if ret < 0.2: 277 ... ret = 0.2 278 ... 279 ... return ret 280 ... 281 >>> clip = ops.InsertGradientOf(clip_gradient) 282 >>> grad_all = ops.GradOperation(get_all=True) 283 >>> def InsertGradientOfClipDemo(): 284 ... def clip_test(x, y): 285 ... x = clip(x) 286 ... y = clip(y) 287 ... c = x * y 288 ... return c 289 ... 290 ... @ms_function 291 ... def f(x, y): 292 ... return clip_test(x, y) 293 ... 294 ... def fd(x, y): 295 ... return grad_all(clip_test)(x, y) 296 ... 297 ... print("forward: ", f(1.1, 0.1)) 298 ... print("clip_gradient:", fd(1.1, 0.1)) 299 ... 300 """ 301 302 @prim_attr_register 303 def __init__(self, f): 304 """Initialize InsertGradientOf.""" 305 self.add_prim_attr('side_effect_backprop', True) 306 self.f = f 307 308 def infer_shape(self, x_shape): 309 return x_shape 310 311 def infer_dtype(self, x_type): 312 return x_type 313 314 315class HookBackward(PrimitiveWithInfer): 316 """ 317 This operation is used as a tag to hook gradient in intermediate variables. Note that this function 318 is only supported in Pynative Mode. 319 320 Note: 321 The hook function must be defined like `hook_fn(grad) -> Tensor or None`, 322 where grad is the gradient passed to the primitive and gradient may be 323 modified and passed to next primitive. The difference between a hook function and 324 callback of InsertGradientOf is that a hook function is executed in the python 325 environment while callback will be parsed and added to the graph. 326 327 Args: 328 hook_fn (Function): Python function. hook function. 329 330 Inputs: 331 - **inputs** (Tensor) - The variable to hook. 332 333 Raises: 334 TypeError: If `inputs` are not a Tensor. 335 TypeError: If `hook_fn` is not a function of python. 336 337 Examples: 338 >>> def hook_fn(grad_out): 339 ... print(grad_out) 340 ... 341 >>> grad_all = GradOperation(get_all=True) 342 >>> hook = ops.HookBackward(hook_fn) 343 >>> def hook_test(x, y): 344 ... z = x * y 345 ... z = hook(z) 346 ... z = z * y 347 ... return z 348 ... 349 >>> def backward(x, y): 350 ... return grad_all(hook_test)(x, y) 351 ... 352 >>> output = backward(1, 2) 353 >>> print(output) 354 """ 355 356 def __init__(self, hook_fn, cell_id=""): 357 """Initialize HookBackward.""" 358 super(HookBackward, self).__init__(self.__class__.__name__) 359 self.add_prim_attr("cell_id", cell_id) 360 self.init_attrs["cell_id"] = cell_id 361 if not isinstance(hook_fn, (FunctionType, MethodType)): 362 raise TypeError(f"For '{self.name}', the type of 'hook_fn' should be python function, " 363 f"but got {type(hook_fn)}.") 364 self.register_hook(hook_fn) 365 self.cell_id = cell_id 366 367 def infer_shape(self, *inputs_shape): 368 if len(inputs_shape) == 1: 369 return inputs_shape[0] 370 return inputs_shape 371 372 def infer_dtype(self, *inputs_type): 373 if len(inputs_type) == 1: 374 return inputs_type[0] 375 return inputs_type 376 377 378class Print(PrimitiveWithInfer): 379 """ 380 Outputs the tensor or string to stdout. The outputs are printed to screen by default. 381 It can also be saved in a file by setting the parameter `print_file_path` in `context`. 382 Once set, the output will be saved in the file specified by print_file_path. 383 parse_print can be employed to reload the data. 384 For more information, please refer to :func:`mindspore.context.set_context` and :func:`mindspore.parse_print`. 385 386 387 Note: 388 In pynative mode, please use python print function. 389 In graph mode, the bool, int and float would be converted into Tensor to print, 390 str remains unchanged. 391 392 Inputs: 393 - **input_x** (Union[Tensor, bool, int, float, str]) - The graph node to attach to. 394 Supports multiple inputs which are separated by ','. 395 396 Outputs: 397 Tensor, has the same data type and shape as original `input_x`. 398 399 Raises: 400 TypeError: If `input_x` is not one of the following: Tensor, bool, int, float, str. 401 402 Supported Platforms: 403 ``Ascend`` ``GPU`` 404 405 Examples: 406 >>> class PrintDemo(nn.Cell): 407 ... def __init__(self): 408 ... super(PrintDemo, self).__init__() 409 ... self.print = ops.Print() 410 ... 411 ... def construct(self, x, y): 412 ... self.print('Print Tensor x and Tensor y:', x, y) 413 ... return x 414 ... 415 >>> x = Tensor(np.ones([2, 1]).astype(np.int32)) 416 >>> y = Tensor(np.ones([2, 2]).astype(np.int32)) 417 >>> net = PrintDemo() 418 >>> result = net(x, y) 419 Print Tensor x and Tensor y: 420 Tensor(shape=[2, 1], dtype=Int32, value= 421 [[1] 422 [1]]) 423 Tensor(shape=[2, 2], dtype=Int32, value= 424 [[1 1] 425 [1 1]]) 426 """ 427 428 @prim_attr_register 429 def __init__(self): 430 """Initialize Print.""" 431 if security.enable_security(): 432 raise ValueError( 433 'The Print is not supported, please without `-s on` and recompile source.') 434 self.add_prim_attr("side_effect_io", True) 435 436 def __call__(self, *args): 437 for arg in args: 438 print(arg) 439 440 def infer_shape(self, *inputs): 441 return [1] 442 443 def infer_dtype(self, *inputs): 444 # check argument types except the last one (io state). 445 for ele in inputs[:-1]: 446 validator.check_subclass("input", ele, 447 [mstype.tensor, mstype.int_, mstype.float_, mstype.bool_, mstype.string], 448 self.name) 449 return mstype.int32 450 451 452class Assert(PrimitiveWithInfer): 453 """ 454 Asserts that the given condition is True. 455 If input condition evaluates to false, print the list of tensor in data. 456 457 Args: 458 summarize (int): Print this many entries of each tensor. 459 460 Inputs: 461 - **condition** [Union[Tensor[bool], bool]] - The condition to evaluate. 462 - **input_data** (Union(tuple[Tensor], list[Tensor])) - The tensors to print out when condition is false. 463 464 Raises: 465 TypeError: If `summarize` is not an int. 466 TypeError: If `condition` is neither a Tensor nor a bool. 467 TypeError: If `input_data` is neither a tuple nor a list. 468 469 Examples: 470 >>> class AssertDemo(nn.Cell): 471 ... def __init__(self): 472 ... super(AssertDemo, self).__init__() 473 ... self.assert1 = ops.Assert(summarize=10) 474 ... self.add = ops.Add() 475 ... 476 ... def construct(self, x, y): 477 ... data = self.add(x, y) 478 ... self.assert1(True, [data]) 479 ... return data 480 ... 481 """ 482 483 @prim_attr_register 484 def __init__(self, summarize=3): 485 """Initialize Assert""" 486 self.summarize = validator.check_value_type("summarize", summarize, [int], self.name) 487 488 def infer_shape(self, condition, inputs): 489 condition_len = len(condition) 490 validator.check_int(condition_len, 1, Rel.LE, "condition's rank", self.name) 491 if condition_len == 1: 492 validator.check_equal_int(condition[0], 1, "condition[0]", self.name) 493 return [1] 494 495 def infer_dtype(self, condition, inputs): 496 validator.check_scalar_or_tensor_types_same({"condition": condition}, [mstype.bool_], self.name) 497 for dtype in inputs: 498 validator.check_subclass("input", dtype, [mstype.tensor], self.name) 499 return mstype.int32 500