# Copyright © 2020 Arm Ltd. All rights reserved.
# SPDX-License-Identifier: MIT
"""
This file contains functions relating to WorkloadTensors.
WorkloadTensors are the inputTensors and outputTensors that are consumed by IRuntime.EnqueueWorkload.
"""
from typing import Union, List, Tuple
import logging

import numpy as np

from .tensor import Tensor
from .const_tensor import ConstTensor


def make_input_tensors(inputs_binding_info: List[Tuple],
                       input_data: List[np.ndarray]) -> List[Tuple[int, ConstTensor]]:
    """Returns `inputTensors` to be used with `IRuntime.EnqueueWorkload`.

    This is the primary function to call when you want to produce `inputTensors` for `IRuntime.EnqueueWorkload`.
    The output is a list of tuples, each containing a `ConstTensor` and its corresponding input tensor id.
    The output can be used directly with `IRuntime.EnqueueWorkload`.
    This function works for single or multiple input data and binding information.

    Examples:
        Creating inputTensors.
        >>> import pyarmnn as ann
        >>> import numpy as np
        >>>
        >>> parser = ann.ITfLiteParser()
        >>> ...
        >>> example_image = np.array(...)
        >>> input_binding_info = parser.GetNetworkInputBindingInfo(...)
        >>>
        >>> input_tensors = ann.make_input_tensors([input_binding_info], [example_image])

    Args:
        inputs_binding_info (list of tuples): (int, `TensorInfo`) Binding information for input tensors obtained from
                                              `GetNetworkInputBindingInfo`.
        input_data (list of ndarrays): Tensor data to be used for inference.

    Returns:
        list: `inputTensors` - A list of tuples (`int`, `ConstTensor`).

    Raises:
        ValueError: If the lengths of `inputs_binding_info` and `input_data` are not the same.
    """
    if len(inputs_binding_info) != len(input_data):
        raise ValueError("Length of 'inputs_binding_info' does not match length of 'input_data'")

    input_tensors = []

    for in_bind_info, in_data in zip(inputs_binding_info, input_data):
        in_tensor_id = in_bind_info[0]
        in_tensor_info = in_bind_info[1]
        input_tensors.append((in_tensor_id, ConstTensor(in_tensor_info, in_data)))

    return input_tensors


def make_output_tensors(outputs_binding_info: List[Tuple]) -> List[Tuple[int, Tensor]]:
    """Returns `outputTensors` to be used with `IRuntime.EnqueueWorkload`.

    This is the primary function to call when you want to produce `outputTensors` for `IRuntime.EnqueueWorkload`.
    The output is a list of tuples, each containing a `Tensor` and its corresponding output tensor id.
    The output can be used directly with `IRuntime.EnqueueWorkload`.

    Examples:
        Creating outputTensors.
        >>> import pyarmnn as ann
        >>>
        >>> parser = ann.ITfLiteParser()
        >>> ...
        >>> output_binding_info = parser.GetNetworkOutputBindingInfo(...)
        >>>
        >>> output_tensors = ann.make_output_tensors([output_binding_info])

    Args:
        outputs_binding_info (list of tuples): (int, `TensorInfo`) Binding information for output tensors obtained
                                               from `GetNetworkOutputBindingInfo`.

    Returns:
        list: `outputTensors` - A list of tuples (`int`, `Tensor`).
    """
    output_tensors = []

    for out_bind_info in outputs_binding_info:
        out_tensor_id = out_bind_info[0]
        out_tensor_info = out_bind_info[1]
        output_tensors.append((out_tensor_id, Tensor(out_tensor_info)))

    return output_tensors


def workload_tensors_to_ndarray(workload_tensors: List[Tuple[int, Union[Tensor, ConstTensor]]]) -> List[np.ndarray]:
    """Returns a list of the underlying tensor data as ndarrays from `inputTensors` or `outputTensors`.

    We refer to `inputTensors` and `outputTensors` as workload tensors because
    they are used with `IRuntime.EnqueueWorkload`.
    Although this function can be used on either `inputTensors` or `outputTensors`, its main use
    is to collect results from `outputTensors` after `IRuntime.EnqueueWorkload` has been called.

    Examples:
        Getting results after inference.
        >>> import pyarmnn as ann
        >>>
        >>> ...
        >>> runtime = ann.IRuntime(...)
        >>> ...
        >>> runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)
        >>>
        >>> inference_results = workload_tensors_to_ndarray(output_tensors)

    Args:
        workload_tensors (inputTensors or outputTensors): `inputTensors` or `outputTensors` to get data from. See
                                                          `make_input_tensors` and `make_output_tensors`.

    Returns:
        list: List of `ndarrays` for the underlying tensor data from the given `inputTensors` or `outputTensors`.
    """
    arrays = []
    for index, (_, tensor) in enumerate(workload_tensors):
        arrays.append(tensor.get_memory_area().reshape(list(tensor.GetShape())))
        logging.info("Workload tensor {} shape: {}".format(index, tensor.GetShape()))

    return arrays