• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (c) 2017-2021 Arm Limited.
3  *
4  * SPDX-License-Identifier: MIT
5  *
6  * Permission is hereby granted, free of charge, to any person obtaining a copy
7  * of this software and associated documentation files (the "Software"), to
8  * deal in the Software without restriction, including without limitation the
9  * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
10  * sell copies of the Software, and to permit persons to whom the Software is
11  * furnished to do so, subject to the following conditions:
12  *
13  * The above copyright notice and this permission notice shall be included in all
14  * copies or substantial portions of the Software.
15  *
16  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
19  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
20  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
21  * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
22  * SOFTWARE.
23  */
24 #include "arm_compute/graph.h"
25 #ifdef ARM_COMPUTE_CL
26 #include "arm_compute/runtime/CL/Utils.h"
27 #endif /* ARM_COMPUTE_CL */
28 #include "support/ToolchainSupport.h"
29 #include "utils/CommonGraphOptions.h"
30 #include "utils/GraphUtils.h"
31 #include "utils/Utils.h"
32 
33 using namespace arm_compute;
34 using namespace arm_compute::utils;
35 using namespace arm_compute::graph::frontend;
36 using namespace arm_compute::graph_utils;
37 
38 /** Example demonstrating how to implement AlexNet's network using the Compute Library's graph API */
39 class GraphAlexnetExample : public Example
40 {
41 public:
GraphAlexnetExample()42     GraphAlexnetExample()
43         : cmd_parser(), common_opts(cmd_parser), common_params(), graph(0, "AlexNet")
44     {
45     }
do_setup(int argc,char ** argv)46     bool do_setup(int argc, char **argv) override
47     {
48         // Parse arguments
49         cmd_parser.parse(argc, argv);
50         cmd_parser.validate();
51 
52         // Consume common parameters
53         common_params = consume_common_graph_parameters(common_opts);
54 
55         // Return when help menu is requested
56         if(common_params.help)
57         {
58             cmd_parser.print_help(argv[0]);
59             return false;
60         }
61 
62         // Checks
63         ARM_COMPUTE_EXIT_ON_MSG(arm_compute::is_data_type_quantized_asymmetric(common_params.data_type), "QASYMM8 not supported for this graph");
64 
65         // Print parameter values
66         std::cout << common_params << std::endl;
67 
68         // Get trainable parameters data path
69         std::string data_path = common_params.data_path;
70 
71         // Create a preprocessor object
72         const std::array<float, 3> mean_rgb{ { 122.68f, 116.67f, 104.01f } };
73         std::unique_ptr<IPreprocessor> preprocessor = std::make_unique<CaffePreproccessor>(mean_rgb);
74 
75         // Create input descriptor
76         const auto        operation_layout = common_params.data_layout;
77         const TensorShape tensor_shape     = permute_shape(TensorShape(227U, 227U, 3U, common_params.batches), DataLayout::NCHW, operation_layout);
78         TensorDescriptor  input_descriptor = TensorDescriptor(tensor_shape, common_params.data_type).set_layout(operation_layout);
79 
80         // Set weights trained layout
81         const DataLayout weights_layout = DataLayout::NCHW;
82 
83         graph << common_params.target
84               << common_params.fast_math_hint
85               << InputLayer(input_descriptor, get_input_accessor(common_params, std::move(preprocessor)))
86               // Layer 1
87               << ConvolutionLayer(
88                   11U, 11U, 96U,
89                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_w.npy", weights_layout),
90                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv1_b.npy"),
91                   PadStrideInfo(4, 4, 0, 0))
92               .set_name("conv1")
93               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu1")
94               << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm1")
95               << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool1")
96               // Layer 2
97               << ConvolutionLayer(
98                   5U, 5U, 256U,
99                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_w.npy", weights_layout),
100                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv2_b.npy"),
101                   PadStrideInfo(1, 1, 2, 2), 2)
102               .set_name("conv2")
103               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu2")
104               << NormalizationLayer(NormalizationLayerInfo(NormType::CROSS_MAP, 5, 0.0001f, 0.75f)).set_name("norm2")
105               << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool2")
106               // Layer 3
107               << ConvolutionLayer(
108                   3U, 3U, 384U,
109                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_w.npy", weights_layout),
110                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv3_b.npy"),
111                   PadStrideInfo(1, 1, 1, 1))
112               .set_name("conv3")
113               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu3")
114               // Layer 4
115               << ConvolutionLayer(
116                   3U, 3U, 384U,
117                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_w.npy", weights_layout),
118                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv4_b.npy"),
119                   PadStrideInfo(1, 1, 1, 1), 2)
120               .set_name("conv4")
121               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu4")
122               // Layer 5
123               << ConvolutionLayer(
124                   3U, 3U, 256U,
125                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_w.npy", weights_layout),
126                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/conv5_b.npy"),
127                   PadStrideInfo(1, 1, 1, 1), 2)
128               .set_name("conv5")
129               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu5")
130               << PoolingLayer(PoolingLayerInfo(PoolingType::MAX, 3, operation_layout, PadStrideInfo(2, 2, 0, 0))).set_name("pool5")
131               // Layer 6
132               << FullyConnectedLayer(
133                   4096U,
134                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_w.npy", weights_layout),
135                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc6_b.npy"))
136               .set_name("fc6")
137               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu6")
138               // Layer 7
139               << FullyConnectedLayer(
140                   4096U,
141                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_w.npy", weights_layout),
142                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc7_b.npy"))
143               .set_name("fc7")
144               << ActivationLayer(ActivationLayerInfo(ActivationLayerInfo::ActivationFunction::RELU)).set_name("relu7")
145               // Layer 8
146               << FullyConnectedLayer(
147                   1000U,
148                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_w.npy", weights_layout),
149                   get_weights_accessor(data_path, "/cnn_data/alexnet_model/fc8_b.npy"))
150               .set_name("fc8")
151               // Softmax
152               << SoftmaxLayer().set_name("prob")
153               << OutputLayer(get_output_accessor(common_params, 5));
154 
155         // Finalize graph
156         GraphConfig config;
157 
158         config.num_threads = common_params.threads;
159         config.use_tuner   = common_params.enable_tuner;
160         config.tuner_mode  = common_params.tuner_mode;
161         config.tuner_file  = common_params.tuner_file;
162         config.mlgo_file   = common_params.mlgo_file;
163 
164         // Load the precompiled kernels from a file into the kernel library, in this way the next time they are needed
165         // compilation won't be required.
166         if(common_params.enable_cl_cache)
167         {
168 #ifdef ARM_COMPUTE_CL
169             restore_program_cache_from_file();
170 #endif /* ARM_COMPUTE_CL */
171         }
172 
173         graph.finalize(common_params.target, config);
174 
175         // Save the opencl kernels to a file
176         if(common_opts.enable_cl_cache)
177         {
178 #ifdef ARM_COMPUTE_CL
179             save_program_cache_to_file();
180 #endif /* ARM_COMPUTE_CL */
181         }
182 
183         return true;
184     }
do_run()185     void do_run() override
186     {
187         // Run graph
188         graph.run();
189     }
190 
191 private:
192     CommandLineParser  cmd_parser;
193     CommonGraphOptions common_opts;
194     CommonGraphParams  common_params;
195     Stream             graph;
196 };
197 
198 /** Main program for AlexNet
199  *
200  * Model is based on:
201  *      https://papers.nips.cc/paper/4824-imagenet-classification-with-deep-convolutional-neural-networks
202  *      "ImageNet Classification with Deep Convolutional Neural Networks"
203  *      Alex Krizhevsky and Sutskever, Ilya and Hinton, Geoffrey E
204  *
205  * Provenance: https://github.com/BVLC/caffe/tree/master/models/bvlc_alexnet
206  *
207  * @note To list all the possible arguments execute the binary appended with the --help option
208  *
209  * @param[in] argc Number of arguments
210  * @param[in] argv Arguments
211  *
212  * @return Return code
213  */
int main(int argc, char **argv)
{
    // Delegate to the common runner, which instantiates the example and drives
    // do_setup()/do_run(), returning a process exit code.
    return arm_compute::utils::run_example<GraphAlexnetExample>(argc, argv);
}
218