/* Copyright 2021 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/lite/experimental/acceleration/mini_benchmark/model_loader.h"

#ifndef _WIN32
#include <unistd.h>
#endif  // !_WIN32

#include <cerrno>
#include <cstdlib>
#include <cstring>
#include <memory>
#include <string>
#include <utility>
#include <vector>

#include "absl/strings/match.h"
#include "absl/strings/numbers.h"
#include "absl/strings/str_split.h"
#include "tensorflow/lite/allocation.h"
#include "tensorflow/lite/experimental/acceleration/mini_benchmark/status_codes.h"
#include "tensorflow/lite/minimal_logging.h"
#include "tensorflow/lite/model_builder.h"
#include "tensorflow/lite/schema/schema_generated.h"
#include "tensorflow/lite/stderr_reporter.h"

namespace tflite {
namespace acceleration {

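// Builds `model_` exactly once: once a model has been built successfully,
// repeated calls are no-ops; the actual loading is delegated to the
// subclass-specific InitInternal().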
MinibenchmarkStatus ModelLoader::Init() {
  if (model_) {
    // Already done.
    return kMinibenchmarkSuccess;
  }
  MinibenchmarkStatus status = InitInternal();
  if (status != kMinibenchmarkSuccess) {
    return status;
  }
  if (!model_) {
    return kMinibenchmarkModelBuildFailed;
  }
  return kMinibenchmarkSuccess;
}

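// Loads the model from a file path, verifying the flatbuffer while building.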
MinibenchmarkStatus PathModelLoader::InitInternal() {
  if (model_path_.empty()) {
    return kMinibenchmarkPreconditionNotMet;
  }
  model_ = FlatBufferModel::VerifyAndBuildFromFile(model_path_.c_str());
  return kMinibenchmarkSuccess;
}

#ifndef _WIN32

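// Memory-maps `model_size_` bytes starting at `model_offset_` from an already
// open file descriptor, then builds the model from that mapping.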
MinibenchmarkStatus MmapModelLoader::InitInternal() {
  if (model_fd_ < 0 || model_offset_ < 0 || model_size_ < 0) {
    return kMinibenchmarkModelReadFailed;
  }
  if (!MMAPAllocation::IsSupported()) {
    return kMinibenchmarkUnsupportedPlatform;
  }
  auto allocation = std::make_unique<MMAPAllocation>(
      model_fd_, model_offset_, model_size_, tflite::DefaultErrorReporter());
  if (!allocation->valid()) {
    return kMinibenchmarkModelReadFailed;
  }
  model_ = FlatBufferModel::VerifyAndBuildFromAllocation(std::move(allocation));
  return kMinibenchmarkSuccess;
}

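// Reads exactly `model_size_` bytes of model content from the read end of a
// pipe into a heap buffer owned by this loader.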
MinibenchmarkStatus PipeModelLoader::InitInternal() {
  if (pipe_fd_ < 0) {
    return kMinibenchmarkModelReadFailed;
  }

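  // Release any buffer left over from a previous Init() attempt before
  // allocating storage for the incoming model (std::free(nullptr) is a no-op).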
  std::free(model_buffer_);
  model_buffer_ = reinterpret_cast<uint8_t*>(std::malloc(model_size_));

  int read_bytes = 0;
  int remaining_bytes = model_size_;
  uint8_t* buffer = model_buffer_;
  while (remaining_bytes > 0 &&
         (read_bytes = read(pipe_fd_, buffer, remaining_bytes)) > 0) {
    remaining_bytes -= read_bytes;
    buffer += read_bytes;
  }
  // Close the read end of the pipe.
  close(pipe_fd_);
  // read() returning -1 signals an error; a non-zero remainder means the
  // writer delivered less data than promised.
  if (read_bytes < 0 || remaining_bytes != 0) {
    TFLITE_LOG_PROD(TFLITE_LOG_INFO,
                    "Reading the model from the pipe failed: %s. Expected to "
                    "read %d bytes; %d bytes missing.",
                    std::strerror(errno), static_cast<int>(model_size_),
                    remaining_bytes);
    return kMinibenchmarkModelReadFailed;
  }

  model_ = FlatBufferModel::BuildFromModel(tflite::GetModel(model_buffer_));
  return kMinibenchmarkSuccess;
}

#endif  // !_WIN32

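// Creates the ModelLoader matching the given path specification:
//   "fd:<fd>:<offset>:<size>"          -> MmapModelLoader
//   "pipe:<read_fd>:<write_fd>:<size>" -> PipeModelLoader (the write end is
//                                         closed here when it is >= 0)
//   anything else                      -> PathModelLoader
// Returns nullptr when a fd:/pipe: specification cannot be parsed, or when it
// is used on Windows, where these loaders are unavailable.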
std::unique_ptr<ModelLoader> CreateModelLoaderFromPath(absl::string_view path) {
  if (absl::StartsWith(path, "fd:")) {
#ifndef _WIN32
    std::vector<std::string> parts = absl::StrSplit(path, ':');
    int model_fd;
    size_t model_offset, model_size;
    if (parts.size() != 4 || !absl::SimpleAtoi(parts[1], &model_fd) ||
        !absl::SimpleAtoi(parts[2], &model_offset) ||
        !absl::SimpleAtoi(parts[3], &model_size)) {
      return nullptr;
    }
    return std::make_unique<MmapModelLoader>(model_fd, model_offset,
                                             model_size);
#else   // _WIN32
    // File-descriptor based loading is not available on Windows.
    return nullptr;
#endif  // !_WIN32
  }
  if (absl::StartsWith(path, "pipe:")) {
#ifndef _WIN32
    std::vector<std::string> parts = absl::StrSplit(path, ':');
    int read_fd, write_fd;
    size_t model_size;
    if (parts.size() != 4 || !absl::SimpleAtoi(parts[1], &read_fd) ||
        !absl::SimpleAtoi(parts[2], &write_fd) ||
        !absl::SimpleAtoi(parts[3], &model_size)) {
      return nullptr;
    }
    // If set, close the write end of the pipe in this (reading) process or
    // thread.
    if (write_fd >= 0) {
      close(write_fd);
    }
    return std::make_unique<PipeModelLoader>(read_fd, model_size);
#else   // _WIN32
    // Pipe based loading is not available on Windows.
    return nullptr;
#endif  // !_WIN32
  }
  return std::make_unique<PathModelLoader>(path);
}

}  // namespace acceleration
}  // namespace tflite