//
// Copyright © 2021 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//
#pragma once

#include <CommonTestUtils.hpp>

#include <ResolveType.hpp>

#include <armnn/INetwork.hpp>

#include <armnn/utility/NumericCast.hpp>

#include <doctest/doctest.h>

#include <vector>

namespace
{

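// Builds a network where the FullyConnected weights are supplied through a
// second input layer rather than a constant tensor (no bias input).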
armnn::INetworkPtr CreateFullyConnectedNetworkNonConstWeights(const armnn::TensorInfo& inputTensorInfo,
                                                              const armnn::TensorInfo& outputTensorInfo,
                                                              const armnn::TensorInfo& weightsTensorInfo,
                                                              armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* weightsInputLayer   = network->AddInputLayer(1, "Weights_Input");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(weightsInputLayer, fullyConnectedLayer, weightsTensorInfo, 0, 1);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

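// Builds a network with non-constant weights (fed through an input layer) and a
// constant bias layer connected to the FullyConnected bias slot.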
armnn::INetworkPtr CreateFullyConnectedNetworkNonConstWeightsConstBias(const armnn::TensorInfo& inputTensorInfo,
                                                                       const armnn::TensorInfo& outputTensorInfo,
                                                                       const armnn::TensorInfo& weightsTensorInfo,
                                                                       const armnn::TensorInfo& biasTensorInfo,
                                                                       const armnn::ConstTensor& biasConstantTensor,
                                                                       armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* weightsInputLayer   = network->AddInputLayer(1, "Weights_Input");
    armnn::IConnectableLayer* biasLayer  = network->AddConstantLayer(biasConstantTensor, "Bias");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(weightsInputLayer, fullyConnectedLayer, weightsTensorInfo, 0, 1);
    Connect(biasLayer, fullyConnectedLayer, biasTensorInfo, 0, 2);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

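// Builds a network with constant weights and a non-constant bias fed through an
// input layer.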
armnn::INetworkPtr CreateFullyConnectedNetworkConstWeightsNonConstBias(const armnn::TensorInfo& inputTensorInfo,
                                                                       const armnn::TensorInfo& outputTensorInfo,
                                                                       const armnn::TensorInfo& weightsTensorInfo,
                                                                       const armnn::TensorInfo& biasTensorInfo,
                                                                       const armnn::ConstTensor& weightsConstantTensor,
                                                                       armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* weightsLayer  = network->AddConstantLayer(weightsConstantTensor, "Weights");
    armnn::IConnectableLayer* biasLayer   = network->AddInputLayer(2, "Bias_Input");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(weightsLayer, fullyConnectedLayer, weightsTensorInfo, 0, 1);
    Connect(biasLayer, fullyConnectedLayer, biasTensorInfo, 0, 2);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

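// Builds a network whose constant weights layer is connected directly without
// setting a TensorInfo on its output slot; used to check that optimization
// rejects the network.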
armnn::INetworkPtr CreateFullyConnectedNetworkNoTensorInfoConstWeights(const armnn::TensorInfo& inputTensorInfo,
                                                                       const armnn::TensorInfo& outputTensorInfo,
                                                                       const armnn::ConstTensor& weightsConstantTensor,
                                                                       armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* weightsLayer  = network->AddConstantLayer(weightsConstantTensor, "Weights");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    weightsLayer->GetOutputSlot(0).Connect(fullyConnectedLayer->GetInputSlot(1));
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

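// Builds a network where only the input and a constant bias are connected; the
// weights input slot of the FullyConnected layer is left unconnected.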
armnn::INetworkPtr CreateFullyConnectedNetworkNoConnectedWeightsExplicit(const armnn::TensorInfo& inputTensorInfo,
                                                                         const armnn::TensorInfo& outputTensorInfo,
                                                                         const armnn::TensorInfo& biasTensorInfo,
                                                                         armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::ConstTensor biases;

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* biasLayer   = network->AddConstantLayer(biases, "Bias_Input");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(biasLayer, fullyConnectedLayer, biasTensorInfo, 0, 2);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

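// Builds a network where neither the weights nor the bias slot of the
// FullyConnected layer is connected.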
armnn::INetworkPtr CreateFullyConnectedNetworkNoConnectedWeightsAndBias(const armnn::TensorInfo& inputTensorInfo,
                                                                        const armnn::TensorInfo& outputTensorInfo,
                                                                        armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

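// Builds a network with constant weights connected to the FullyConnected layer
// but with the bias slot left unconnected.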
armnn::INetworkPtr CreateFullyConnectedNetworkNoConnectedBiasExplicit(const armnn::TensorInfo& inputTensorInfo,
                                                                      const armnn::TensorInfo& outputTensorInfo,
                                                                      const armnn::TensorInfo& weightsTensorInfo,
                                                                      const armnn::ConstTensor& weightsConstantTensor,
                                                                      armnn::FullyConnectedDescriptor descriptor)
{
    armnn::INetworkPtr network(armnn::INetwork::Create());

    armnn::IConnectableLayer* inputLayer  = network->AddInputLayer(0, "Input");
    armnn::IConnectableLayer* weightsLayer  = network->AddConstantLayer(weightsConstantTensor, "Weights");
    armnn::IConnectableLayer* fullyConnectedLayer = network->AddFullyConnectedLayer(descriptor, "Fully_Connected");
    armnn::IConnectableLayer* outputLayer = network->AddOutputLayer(0, "Output");

    Connect(inputLayer, fullyConnectedLayer, inputTensorInfo, 0, 0);
    Connect(weightsLayer, fullyConnectedLayer, weightsTensorInfo, 0, 1);
    Connect(fullyConnectedLayer, outputLayer, outputTensorInfo, 0, 0);

    return network;
}

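// End-to-end test: runs a FullyConnected layer whose weights are passed in as a
// second network input at execution time and compares the quantized output
// against the expected reference values.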
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void FullyConnectedWithDynamicWeightsEndToEnd(const std::vector<armnn::BackendId>& backends)
{
    using namespace armnn;

    armnn::TensorInfo inputTensorInfo({ 1, 1, 2, 3 }, ArmnnType);
    inputTensorInfo.SetQuantizationScale(0.1f);
    inputTensorInfo.SetQuantizationOffset(63);
    inputTensorInfo.SetConstant(true);

    armnn::TensorInfo outputTensorInfo({ 1, 2 }, ArmnnType);
    outputTensorInfo.SetQuantizationScale(5.f);
    outputTensorInfo.SetQuantizationOffset(10);

    armnn::TensorInfo weightsTensorInfo({ 2, 6 }, ArmnnType);
    weightsTensorInfo.SetQuantizationScale(0.2f);
    weightsTensorInfo.SetQuantizationOffset(93);
    weightsTensorInfo.SetConstant(true);

    FullyConnectedDescriptor descriptor;
    descriptor.m_ConstantWeights = false;
    descriptor.m_BiasEnabled     = false;
    descriptor.m_TransposeWeightMatrix = true;

    std::vector<T> inputData {
        -1.2f, 6.1f, -3.5f,
        18.8f, -5.5f, 2.9f
    };

    std::vector<T> weightsData {
        -8.4f, 20.0f, -10.4f, -8, 16.4f, -11.8f,
        23.4f, 10.4f, -14.0f, -3.8f, -11.8f, 11.4f
    };

    std::vector<T> floatExpectedOutputData {
        -107.04f, 110.f
    };
    std::vector<T> expectedOutputData = armnnUtils::QuantizedVector<T>(floatExpectedOutputData);

    armnn::INetworkPtr network = CreateFullyConnectedNetworkNonConstWeights(inputTensorInfo,
                                                                            outputTensorInfo,
                                                                            weightsTensorInfo,
                                                                            descriptor);

    CHECK(network);

    std::map<int, std::vector<T>> inputTensorData    = {{ 0, inputData }, {1, weightsData}};
    std::map<int, std::vector<T>> expectedOutputTensorData = {{ 0, expectedOutputData }};

    EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),
                                                inputTensorData,
                                                expectedOutputTensorData,
                                                backends,
                                                1.0f);
}

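// End-to-end test covering two configurations: non-constant weights with a
// constant bias (constantWeightsOrBias == false) or constant weights with a
// non-constant bias (constantWeightsOrBias == true), optionally with the
// weight matrix transposed.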
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void FullyConnectedWithDynamicOrConstantInputsEndToEnd(const std::vector<armnn::BackendId>& backends,
                                                       const bool transposeWeights,
                                                       const bool constantWeightsOrBias)
{
    unsigned int inputWidth = 1;
    unsigned int inputHeight = 1;
    unsigned int inputChannels = 5;
    unsigned int inputNum = 2;

    unsigned int outputChannels = 3;
    unsigned int outputNum = 2;

    unsigned int inputShape[]   = { inputNum, inputChannels, inputHeight, inputWidth };
    unsigned int outputShape[]  = { outputNum, outputChannels };
    unsigned int weightsShape[] = { inputChannels, outputChannels };

    if (transposeWeights)
    {
        std::swap(weightsShape[0], weightsShape[1]);
    }

    unsigned int biasShape[] = { outputChannels };

    armnn::TensorInfo inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32, 0.0f, 0, true);
    armnn::TensorInfo outputTensorInfo = armnn::TensorInfo(2, outputShape, armnn::DataType::Float32);
    armnn::TensorInfo weightsDesc = armnn::TensorInfo(2, weightsShape, armnn::DataType::Float32, 0.0f, 0, true);
    armnn::TensorInfo biasesDesc = armnn::TensorInfo(1, biasShape, armnn::DataType::Float32, 0.0f, 0, true);

    std::vector<float> input =
    {
        1.0f, 2.0f, 3.0f, 4.0f, 5.0f,
        5.0f, 4.0f, 3.0f, 2.0f, 1.0f
    };

    std::vector<float> weights =
    {
        .5f, 2.f, .5f,
        .5f, 2.f, 1.f,
        .5f, 2.f, 2.f,
        .5f, 2.f, 3.f,
        .5f, 2.f, 4.f
    };

    if (transposeWeights)
    {
        weights =
        {
            .5f, .5f, .5f, .5f, .5f,
            2.f, 2.f, 2.f, 2.f, 2.f,
            .5f, 1.f, 2.f, 3.f, 4.f
        };
    }

    std::vector<float> biasValues = std::vector<float>({10.f, 20.f, 30.f});

    std::vector<float> expectedOutput =
    {
        0.5f + 1.0f + 1.5f + 2.0f + 2.5f + biasValues[0],
        2.0f + 4.0f + 6.0f + 8.0f + 10.f + biasValues[1],
        0.5f + 2.0f + 6.0f + 12.f + 20.f + biasValues[2],

        2.5f + 2.0f + 1.5f + 1.0f + 0.5f + biasValues[0],
        10.0f + 8.0f + 6.0f + 4.0f + 2.f + biasValues[1],
        2.5f + 4.0f + 6.0f + 6.f + 4.f   + biasValues[2]
    };

    FullyConnectedDescriptor descriptor;
    descriptor.m_BiasEnabled = true;
    descriptor.m_TransposeWeightMatrix = transposeWeights;
    descriptor.m_ConstantWeights = constantWeightsOrBias;

    if (!constantWeightsOrBias)
    {
        // Tests non constant weights and constant bias.
        ConstTensor biasConstantTensor(biasesDesc, biasValues.data());

        armnn::INetworkPtr network = CreateFullyConnectedNetworkNonConstWeightsConstBias(inputTensorInfo,
                                                                                         outputTensorInfo,
                                                                                         weightsDesc,
                                                                                         biasesDesc,
                                                                                         biasConstantTensor,
                                                                                         descriptor);
        CHECK(network);

        std::map<int, std::vector<T>> inputTensorData    = {{ 0, input }, {1, weights}};
        std::map<int, std::vector<T>> expectedOutputTensorData = {{ 0, expectedOutput }};

        EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),
                                                    inputTensorData,
                                                    expectedOutputTensorData,
                                                    backends,
                                                    1.0f);
    }
    else
    {
        // Tests constant weights and non constant bias.
        ConstTensor weightsConstantTensor(weightsDesc, weights.data());

        armnn::INetworkPtr network = CreateFullyConnectedNetworkConstWeightsNonConstBias(inputTensorInfo,
                                                                                         outputTensorInfo,
                                                                                         weightsDesc,
                                                                                         biasesDesc,
                                                                                         weightsConstantTensor,
                                                                                         descriptor);
        CHECK(network);

        std::map<int, std::vector<T>> inputTensorData    = {{ 0, input }, {2, biasValues}};
        std::map<int, std::vector<T>> expectedOutputTensorData = {{ 0, expectedOutput }};

        EndToEndLayerTestImpl<ArmnnType, ArmnnType>(std::move(network),
                                                    inputTensorData,
                                                    expectedOutputTensorData,
                                                    backends,
                                                    1.0f);
    }
}

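// Error-checking test: verifies that a FullyConnected layer with missing weight
// or bias connections, or with an unset constant-layer TensorInfo, is rejected
// either at network construction time or during Optimize.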
template<armnn::DataType ArmnnType, typename T = armnn::ResolveType<ArmnnType>>
void FullyConnectedErrorChecking(const std::vector<armnn::BackendId>& backends,
                                 const bool explicitCheck,
                                 const bool biasEnabled,
                                 const bool connectedWeights,
                                 const bool connectedBias,
                                 const bool tensorInfoSet)
{
    unsigned int inputWidth = 1;
    unsigned int inputHeight = 1;
    unsigned int inputChannels = 5;
    unsigned int inputNum = 2;

    unsigned int outputChannels = 3;
    unsigned int outputNum = 2;

    unsigned int inputShape[]   = { inputNum, inputChannels, inputHeight, inputWidth };
    unsigned int outputShape[]  = { outputNum, outputChannels };
    unsigned int weightsShape[] = { inputChannels, outputChannels };

    unsigned int biasShape[] = { outputChannels };

    armnn::TensorInfo inputTensorInfo = armnn::TensorInfo(4, inputShape, armnn::DataType::Float32, 0.0f, 0, true);
    armnn::TensorInfo outputTensorInfo = armnn::TensorInfo(2, outputShape, armnn::DataType::Float32);
    armnn::TensorInfo weightsDesc = armnn::TensorInfo(2, weightsShape, armnn::DataType::Float32, 0.0f, 0, true);
    armnn::TensorInfo biasesDesc = armnn::TensorInfo(1, biasShape, armnn::DataType::Float32, 0.0f, 0, true);

    std::vector<float> weights =
    {
        .5f, 2.f, .5f,
        .5f, 2.f, 1.f,
        .5f, 2.f, 2.f,
        .5f, 2.f, 3.f,
        .5f, 2.f, 4.f
    };

    FullyConnectedDescriptor descriptor;
    descriptor.m_BiasEnabled = biasEnabled;

    if(explicitCheck)
    {
        if(!biasEnabled)
        {
            try
            {
                CreateFullyConnectedNetworkNoConnectedWeightsExplicit(inputTensorInfo,
                                                                      outputTensorInfo,
                                                                      biasesDesc,
                                                                      descriptor);
                FAIL("LayerValidationException should have been thrown");
            }
            catch (const LayerValidationException& exc)
            {
                CHECK(strcmp(exc.what(), "Tried to connect bias to FullyConnected layer when bias is not enabled: "
                                         "Failed to connect to input slot 2 on FullyConnected layer "
                                         "\"Fully_Connected\" as the slot does not exist or is unavailable") == 0);
            }
        }
        else if (!connectedWeights)
        {
            armnn::INetworkPtr network = CreateFullyConnectedNetworkNoConnectedWeightsExplicit(inputTensorInfo,
                                                                                               outputTensorInfo,
                                                                                               biasesDesc,
                                                                                               descriptor);
            CHECK(network);

            // Create runtime in which test will run
            IRuntime::CreationOptions options;
            IRuntimePtr               runtime(IRuntime::Create(options));

            CHECK_THROWS_AS(Optimize(*network, backends, runtime->GetDeviceSpec()), LayerValidationException);
        }
        else if (!connectedBias)
        {
            // Tests with constant weights.
            ConstTensor weightsConstantTensor(weightsDesc, weights.data());

            armnn::INetworkPtr network = CreateFullyConnectedNetworkNoConnectedBiasExplicit(inputTensorInfo,
                                                                                            outputTensorInfo,
                                                                                            weightsDesc,
                                                                                            weightsConstantTensor,
                                                                                            descriptor);
            CHECK(network);

            // Create runtime in which test will run
            IRuntime::CreationOptions options;
            IRuntimePtr               runtime(IRuntime::Create(options));

            CHECK_THROWS_AS(Optimize(*network, backends, runtime->GetDeviceSpec()), LayerValidationException);
        }
    }
    else if(!connectedWeights && !connectedBias)
    {
        armnn::INetworkPtr network = CreateFullyConnectedNetworkNoConnectedWeightsAndBias(inputTensorInfo,
                                                                                          outputTensorInfo,
                                                                                          descriptor);
        CHECK(network);

        // Create runtime in which test will run
        IRuntime::CreationOptions options;
        IRuntimePtr               runtime(IRuntime::Create(options));

        CHECK_THROWS_AS(Optimize(*network, backends, runtime->GetDeviceSpec()), LayerValidationException);
    }
    else if(!tensorInfoSet)
    {
        // Tests with constant weights.
        ConstTensor weightsConstantTensor(weightsDesc, weights.data());

        armnn::INetworkPtr network = CreateFullyConnectedNetworkNoTensorInfoConstWeights(inputTensorInfo,
                                                                                         outputTensorInfo,
                                                                                         weightsConstantTensor,
                                                                                         descriptor);
        CHECK(network);

        // Create runtime in which test will run
        IRuntime::CreationOptions options;
        IRuntimePtr runtime(IRuntime::Create(options));

        try
        {
            Optimize(*network, backends, runtime->GetDeviceSpec());
            FAIL("LayerValidationException should have been thrown");
        }
        catch (const LayerValidationException& exc)
        {
            CHECK(strcmp(exc.what(), "Output slot TensorInfo not set on Constant layer \"Weights\"") == 0);
        }
    }
}

} // anonymous namespace