# Copyright © 2020 Arm Ltd. All rights reserved.
# SPDX-License-Identifier: MIT
import os

import pytest
import pyarmnn as ann
import numpy as np


def test_TfLiteParserOptions_default_values():
    parserOptions = ann.TfLiteParserOptions()
    assert parserOptions.m_InferAndValidate == False
    assert parserOptions.m_StandInLayerForUnsupported == False


@pytest.fixture()
def parser(shared_data_folder):
    """
    Parse and setup the test network to be used for the tests below
    """
    parser = ann.ITfLiteParser()
    parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'mock_model.tflite'))

    yield parser


def test_tflite_parser_swig_destroy():
    assert ann.ITfLiteParser.__swig_destroy__, "There is a swig python destructor defined"
    assert ann.ITfLiteParser.__swig_destroy__.__name__ == "delete_ITfLiteParser"


def test_check_tflite_parser_swig_ownership(parser):
    # Check to see that SWIG has ownership for parser. This instructs SWIG to take
    # ownership of the return value. This allows the value to be automatically
    # garbage-collected when it is no longer in use
    assert parser.thisown


def test_tflite_parser_with_optional_options():
    parserOptions = ann.TfLiteParserOptions()
    parserOptions.m_InferAndValidate = True
    parser = ann.ITfLiteParser(parserOptions)
    assert parser.thisown


def create_with_opt():
    parserOptions = ann.TfLiteParserOptions()
    parserOptions.m_InferAndValidate = True
    return ann.ITfLiteParser(parserOptions)


def test_tflite_parser_with_optional_options_out_of_scope(shared_data_folder):
    parser = create_with_opt()
    network = parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, "mock_model.tflite"))

    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    input_names = parser.GetSubgraphInputTensorNames(graph_id)
    input_binding_info = parser.GetNetworkInputBindingInfo(graph_id, input_names[0])

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)

    preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]

    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
    assert 0 == len(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert "" == messages


def test_tflite_get_sub_graph_count(parser):
    graphs_count = parser.GetSubgraphCount()
    assert graphs_count == 1


def test_tflite_get_network_input_binding_info(parser):
    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    input_names = parser.GetSubgraphInputTensorNames(graph_id)

    input_binding_info = parser.GetNetworkInputBindingInfo(graph_id, input_names[0])

    tensor = input_binding_info[1]
    assert tensor.GetDataType() == 2
    assert tensor.GetNumDimensions() == 4
    assert tensor.GetNumElements() == 784
    assert tensor.GetQuantizationOffset() == 128
    assert tensor.GetQuantizationScale() == 0.007843137718737125


def test_tflite_get_network_output_binding_info(parser):
    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)

    output_binding_info1 = parser.GetNetworkOutputBindingInfo(graph_id, output_names[0])

    # Check the tensor info retrieved from GetNetworkOutputBindingInfo
    tensor1 = output_binding_info1[1]

    assert tensor1.GetDataType() == 2
    assert tensor1.GetNumDimensions() == 2
    assert tensor1.GetNumElements() == 10
    assert tensor1.GetQuantizationOffset() == 0
    assert tensor1.GetQuantizationScale() == 0.00390625


def test_tflite_get_subgraph_input_tensor_names(parser):
    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    input_names = parser.GetSubgraphInputTensorNames(graph_id)

    assert input_names == ('input_1',)


def test_tflite_get_subgraph_output_tensor_names(parser):
    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)

    assert output_names[0] == 'dense/Softmax'


def test_tflite_filenotfound_exception(shared_data_folder):
    parser = ann.ITfLiteParser()

    with pytest.raises(RuntimeError) as err:
        parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, 'some_unknown_network.tflite'))

    # Only check for part of the exception message since the exception returns the
    # absolute path, which will change on different machines.
    assert 'Cannot find the file' in str(err.value)


def test_tflite_parser_end_to_end(shared_data_folder):
    parser = ann.ITfLiteParser()

    network = parser.CreateNetworkFromBinaryFile(os.path.join(shared_data_folder, "mock_model.tflite"))

    graphs_count = parser.GetSubgraphCount()
    graph_id = graphs_count - 1

    input_names = parser.GetSubgraphInputTensorNames(graph_id)
    input_binding_info = parser.GetNetworkInputBindingInfo(graph_id, input_names[0])

    output_names = parser.GetSubgraphOutputTensorNames(graph_id)

    preferred_backends = [ann.BackendId('CpuAcc'), ann.BackendId('CpuRef')]

    # Create a runtime, optimize the parsed network for the preferred backends and load it
    options = ann.CreationOptions()
    runtime = ann.IRuntime(options)

    opt_network, messages = ann.Optimize(network, preferred_backends, runtime.GetDeviceSpec(), ann.OptimizerOptions())
    assert 0 == len(messages)

    net_id, messages = runtime.LoadNetwork(opt_network)
    assert "" == messages

    # Load test image data stored in input_lite.npy
    input_tensor_data = np.load(os.path.join(shared_data_folder, 'tflite_parser/input_lite.npy'))
    input_tensors = ann.make_input_tensors([input_binding_info], [input_tensor_data])

    # Create an output tensor for each output of the subgraph
    output_tensors = []
    for index, output_name in enumerate(output_names):
        out_bind_info = parser.GetNetworkOutputBindingInfo(graph_id, output_name)
        out_tensor_info = out_bind_info[1]
        out_tensor_id = out_bind_info[0]
        output_tensors.append((out_tensor_id,
                               ann.Tensor(out_tensor_info)))

    # Run inference
    runtime.EnqueueWorkload(net_id, input_tensors, output_tensors)

    # Read the inference results back from the output tensors
    output_vectors = []
    for index, out_tensor in enumerate(output_tensors):
        output_vectors.append(out_tensor[1].get_memory_area())

    # Load golden output file for result comparison.
    expected_outputs = np.load(os.path.join(shared_data_folder, 'tflite_parser/golden_output_lite.npy'))

    # Check that output matches golden output
    assert (expected_outputs == output_vectors[0]).all()