/* Copyright 2018 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/lite/experimental/micro/micro_interpreter.h"

#include "tensorflow/lite/experimental/micro/testing/micro_test.h"

namespace tflite {
namespace {
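// Hands out memory from a fixed-size internal buffer, so the
// FlatBufferBuilder used by these tests never needs true dynamic allocation.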
class StackAllocator : public flatbuffers::Allocator {
 public:
  StackAllocator() : data_(data_backing_), data_size_(0) {}

  uint8_t* allocate(size_t size) override {
    if ((data_size_ + size) > kStackAllocatorSize) {
      // TODO(petewarden): Add error reporting beyond returning null!
      return nullptr;
    }
    uint8_t* result = data_;
    data_ += size;
    data_size_ += size;
    return result;
  }

  void deallocate(uint8_t* p, size_t) override {}

  static StackAllocator& instance() {
    // Avoid using true dynamic memory allocation to be portable to bare metal.
    static char inst_memory[sizeof(StackAllocator)];
    static StackAllocator* inst = new (inst_memory) StackAllocator;
    return *inst;
  }

  static constexpr int kStackAllocatorSize = 4096;

 private:
  uint8_t data_backing_[kStackAllocatorSize];
  uint8_t* data_;
  int data_size_;
};

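// Returns a shared FlatBufferBuilder that draws all of its memory from the
// StackAllocator above.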
flatbuffers::FlatBufferBuilder* BuilderInstance() {
  static char inst_memory[sizeof(flatbuffers::FlatBufferBuilder)];
  static flatbuffers::FlatBufferBuilder* inst =
      new (inst_memory) flatbuffers::FlatBufferBuilder(
          StackAllocator::kStackAllocatorSize, &StackAllocator::instance());
  return inst;
}

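// Serializes a one-dimensional int32 tensor of the given length and returns a
// pointer to the resulting flatbuffer Tensor table.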
const Tensor* Create1dTensor(int size) {
  using flatbuffers::Offset;
  flatbuffers::FlatBufferBuilder* builder = BuilderInstance();
  constexpr size_t tensor_shape_size = 1;
  const int32_t tensor_shape[tensor_shape_size] = {size};
  const Offset<Tensor> tensor_offset = CreateTensor(
      *builder, builder->CreateVector(tensor_shape, tensor_shape_size),
      TensorType_INT32, 0, builder->CreateString("test_tensor"), 0, false);
  builder->Finish(tensor_offset);
  void* tensor_pointer = builder->GetBufferPointer();
  const Tensor* tensor = flatbuffers::GetRoot<Tensor>(tensor_pointer);
  return tensor;
}

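// Serializes a vector holding a single empty Buffer for the tests to hand to
// the allocator.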
const flatbuffers::Vector<flatbuffers::Offset<Buffer>>* CreateBuffers() {
  using flatbuffers::Offset;
  flatbuffers::FlatBufferBuilder* builder = BuilderInstance();
  constexpr size_t buffers_size = 1;
  const Offset<Buffer> buffers[buffers_size] = {
      CreateBuffer(*builder),
  };
  const flatbuffers::Offset<flatbuffers::Vector<flatbuffers::Offset<Buffer>>>
      buffers_offset = builder->CreateVector(buffers, buffers_size);
  builder->Finish(buffers_offset);
  void* buffers_pointer = builder->GetBufferPointer();
  const flatbuffers::Vector<flatbuffers::Offset<Buffer>>* result =
      flatbuffers::GetRoot<flatbuffers::Vector<flatbuffers::Offset<Buffer>>>(
          buffers_pointer);
  return result;
}

}  // namespace
}  // namespace tflite

TF_LITE_MICRO_TESTS_BEGIN

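// A 100-element int32 tensor (400 bytes) should fit in the 1KB arena and come
// back with the expected type, shape, and byte size.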
TF_LITE_MICRO_TEST(TestAllocateTensor) {
  constexpr size_t arena_size = 1024;
  uint8_t arena[arena_size];
  tflite::SimpleTensorAllocator allocator(arena, arena_size);

  const tflite::Tensor* tensor = tflite::Create1dTensor(100);
  const flatbuffers::Vector<flatbuffers::Offset<tflite::Buffer>>* buffers =
      tflite::CreateBuffers();

  TfLiteTensor allocated_tensor;
  TF_LITE_MICRO_EXPECT_EQ(
      kTfLiteOk,
      allocator.AllocateTensor(*tensor, 0, 1, buffers, micro_test::reporter,
                               &allocated_tensor));
  TF_LITE_MICRO_EXPECT_EQ(kTfLiteInt32, allocated_tensor.type);
  TF_LITE_MICRO_EXPECT_EQ(1, allocated_tensor.dims->size);
  TF_LITE_MICRO_EXPECT_EQ(100, allocated_tensor.dims->data[0]);
  TF_LITE_MICRO_EXPECT_EQ(400, allocated_tensor.bytes);
  TF_LITE_MICRO_EXPECT_NE(nullptr, allocated_tensor.data.i32);
}

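// A 2000-element int32 tensor (8000 bytes) cannot fit in the 1KB arena, so
// allocation should fail.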
TF_LITE_MICRO_TEST(TestTooLarge) {
  constexpr size_t arena_size = 1024;
  uint8_t arena[arena_size];
  tflite::SimpleTensorAllocator allocator(arena, arena_size);

  const tflite::Tensor* tensor = tflite::Create1dTensor(2000);
  const flatbuffers::Vector<flatbuffers::Offset<tflite::Buffer>>* buffers =
      tflite::CreateBuffers();

  TfLiteTensor allocated_tensor;
  TF_LITE_MICRO_EXPECT_NE(
      kTfLiteOk,
      allocator.AllocateTensor(*tensor, 0, 1, buffers, micro_test::reporter,
                               &allocated_tensor));
}

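// A raw allocation exactly the size of the arena should still succeed.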
TF_LITE_MICRO_TEST(TestJustFits) {
  constexpr size_t arena_size = 1024;
  uint8_t arena[arena_size];
  tflite::SimpleTensorAllocator allocator(arena, arena_size);

  uint8_t* result = allocator.AllocateMemory(arena_size, 1);
  TF_LITE_MICRO_EXPECT_NE(nullptr, result);
}

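// An allocation with a 4-byte alignment requirement should return a pointer
// aligned to 4 bytes.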
TF_LITE_MICRO_TEST(TestAligned) {
  constexpr size_t arena_size = 1024;
  uint8_t arena[arena_size];
  tflite::SimpleTensorAllocator allocator(arena, arena_size);

  uint8_t* result = allocator.AllocateMemory(1, 1);
  TF_LITE_MICRO_EXPECT_NE(nullptr, result);

  result = allocator.AllocateMemory(16, 4);
  TF_LITE_MICRO_EXPECT_NE(nullptr, result);
  TF_LITE_MICRO_EXPECT_EQ(0, reinterpret_cast<size_t>(result) & 3);
}

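// Two 768-byte allocations exceed the 1KB arena, so the second should fail.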
TF_LITE_MICRO_TEST(TestMultipleTooLarge) {
  constexpr size_t arena_size = 1024;
  uint8_t arena[arena_size];
  tflite::SimpleTensorAllocator allocator(arena, arena_size);

  uint8_t* result = allocator.AllocateMemory(768, 1);
  TF_LITE_MICRO_EXPECT_NE(nullptr, result);

  result = allocator.AllocateMemory(768, 1);
  TF_LITE_MICRO_EXPECT_EQ(nullptr, result);
}

TF_LITE_MICRO_TESTS_END