/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include <jni.h>

#include <iterator>
#include <sstream>
#include <string>
#include <vector>

#include "tensorflow/lite/tools/benchmark/benchmark_tflite_model.h"

#ifdef __ANDROID__
#include <android/log.h>
#endif

namespace tflite {
namespace benchmark {
namespace {

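// Listener that captures the final benchmark results and writes a
// human-readable summary to the Android system log (or to stderr when not
// built for Android).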
class AndroidBenchmarkLoggingListener : public BenchmarkListener {
  void OnBenchmarkEnd(const BenchmarkResults& results) override {
    auto inference_us = results.inference_time_us();
    auto init_us = results.startup_latency_us();
    auto warmup_us = results.warmup_time_us();
    std::stringstream results_output;
    results_output << "Average inference timings in us: "
                   << "Warmup: " << warmup_us.avg() << ", "
                   << "Init: " << init_us << ", "
                   << "Inference: " << inference_us.avg();
    results_output << std::endl
                   << "Overall " << results.overall_mem_usage();
    results_output << std::endl
                   << "Inference time us: " << results.inference_time_us();

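    // On Android, write the summary to the system log so it is visible via
    // logcat; otherwise fall back to stderr.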
#ifdef __ANDROID__
    __android_log_print(ANDROID_LOG_ERROR, "tflite", "%s",
                        results_output.str().c_str());
#else
    fprintf(stderr, "%s", results_output.str().c_str());
#endif
  }
};

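// Runs the benchmark with the given command-line arguments, attaching the
// logging listener defined above so the final results are reported.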
void Run(int argc, char** argv) {
  BenchmarkTfLiteModel benchmark;
  AndroidBenchmarkLoggingListener listener;
  benchmark.AddListener(&listener);
  benchmark.Run(argc, argv);
}

}  // namespace
}  // namespace benchmark
}  // namespace tflite

extern "C" {

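// JNI entry point invoked from Java. Based on the JNI name mangling, the
// corresponding Java-side declaration is expected to look roughly like:
//
//   package org.tensorflow.lite.benchmark;
//   class BenchmarkModel {
//     static native void nativeRun(String args);
//   }
//
// The single string argument is split on whitespace and passed to the
// benchmark as if it were a command line.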
JNIEXPORT void JNICALL
Java_org_tensorflow_lite_benchmark_BenchmarkModel_nativeRun(JNIEnv* env,
                                                            jclass clazz,
                                                            jstring args_obj) {
  const char* args_chars = env->GetStringUTFChars(args_obj, nullptr);

  // Split the args string into individual arg tokens.
  std::istringstream iss(args_chars);
  std::vector<std::string> args_split{std::istream_iterator<std::string>(iss),
                                      {}};

  // Construct a fake argv command-line object for the benchmark.
  std::vector<char*> argv;
  std::string arg0 = "(BenchmarkModelAndroid)";
  argv.push_back(const_cast<char*>(arg0.data()));
  for (auto& arg : args_split) {
    argv.push_back(const_cast<char*>(arg.data()));
  }

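  // Note: argv holds raw pointers into arg0 and args_split, so both must
  // stay alive until the benchmark run below completes.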
  tflite::benchmark::Run(static_cast<int>(argv.size()), argv.data());

  env->ReleaseStringUTFChars(args_obj, args_chars);
}

}  // extern "C"