/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/

#include "tensorflow/core/kernels/batching_util/batch_scheduler.h"

#include "tensorflow/core/platform/env.h"
#include "tensorflow/core/platform/macros.h"
#include "tensorflow/core/platform/test.h"

namespace tensorflow {
namespace serving {
namespace {

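// A trivial BatchTask whose only state is a fixed size, used to exercise
// Batch in the tests below.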
class FakeTask : public BatchTask {
 public:
  explicit FakeTask(size_t size) : size_(size) {}

  ~FakeTask() override = default;

  size_t size() const override { return size_; }

 private:
  const size_t size_;

  TF_DISALLOW_COPY_AND_ASSIGN(FakeTask);
};

TEST(BatchTest, Basic) {
  Batch<FakeTask> batch;

  EXPECT_EQ(0, batch.num_tasks());
  EXPECT_TRUE(batch.empty());
  EXPECT_EQ(0, batch.size());
  EXPECT_FALSE(batch.IsClosed());

  auto task0 = new FakeTask(3);
  batch.AddTask(std::unique_ptr<FakeTask>(task0));

  EXPECT_EQ(1, batch.num_tasks());
  EXPECT_FALSE(batch.empty());
  EXPECT_EQ(task0->size(), batch.size());
  EXPECT_EQ(task0->size(), batch.task(0).size());
  EXPECT_FALSE(batch.IsClosed());

  auto task1 = new FakeTask(7);
  batch.AddTask(std::unique_ptr<FakeTask>(task1));

  EXPECT_EQ(2, batch.num_tasks());
  EXPECT_FALSE(batch.empty());
  EXPECT_EQ(task0->size() + task1->size(), batch.size());
  EXPECT_EQ(task1->size(), batch.task(1).size());
  EXPECT_EQ(task1->size(), batch.mutable_task(1)->size());
  EXPECT_FALSE(batch.IsClosed());

  batch.Close();
  EXPECT_TRUE(batch.IsClosed());

  EXPECT_EQ(2, batch.num_tasks());
  EXPECT_FALSE(batch.empty());
  EXPECT_EQ(task0->size() + task1->size(), batch.size());
  EXPECT_EQ(task0->size(), batch.task(0).size());
  EXPECT_EQ(task1->size(), batch.task(1).size());

  EXPECT_EQ(7, batch.RemoveTask()->size());
  EXPECT_EQ(3, batch.size());
  EXPECT_EQ(3, batch.RemoveTask()->size());
  EXPECT_EQ(0, batch.size());
  EXPECT_TRUE(batch.empty());
}

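// WaitUntilClosed() should block the caller until some other thread calls
// Close() on the batch.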
TEST(BatchTest, WaitUntilClosed) {
  Batch<FakeTask> batch;
  batch.AddTask(std::unique_ptr<FakeTask>(new FakeTask(3)));
  EXPECT_FALSE(batch.IsClosed());

  std::unique_ptr<Thread> close_thread(
      Env::Default()->StartThread(ThreadOptions(), "test", [&batch]() {
        Env::Default()->SleepForMicroseconds(100);
        batch.Close();
      }));
  batch.WaitUntilClosed();
  EXPECT_TRUE(batch.IsClosed());
}

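// Destroying a Batch should block until the batch has been closed; a
// separate thread performs the deletion so the main thread can observe that
// it stays blocked while the batch remains open.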
TEST(BatchTest, DeletionBlocksUntilClosed) {
  Batch<FakeTask>* batch = new Batch<FakeTask>;
  batch->AddTask(std::unique_ptr<FakeTask>(new FakeTask(3)));
  EXPECT_FALSE(batch->IsClosed());

  Notification do_delete, deleted;
  std::unique_ptr<Thread> delete_thread(Env::Default()->StartThread(
      ThreadOptions(), "test", [&batch, &do_delete, &deleted]() {
        do_delete.WaitForNotification();
        delete batch;
        deleted.Notify();
      }));
  do_delete.Notify();
  Env::Default()->SleepForMicroseconds(10 * 1000 /* 10 milliseconds */);
  EXPECT_FALSE(deleted.HasBeenNotified());
  batch->Close();
  deleted.WaitForNotification();
}

TEST(BatchTest, RemoveAllTasks) {
  Batch<FakeTask> batch;

  auto task0 = new FakeTask(3);
  batch.AddTask(std::unique_ptr<FakeTask>(task0));

  auto task1 = new FakeTask(7);
  batch.AddTask(std::unique_ptr<FakeTask>(task1));

  batch.Close();
  EXPECT_TRUE(batch.IsClosed());

  std::vector<std::unique_ptr<FakeTask>> tasks_in_batch =
      batch.RemoveAllTasks();
  EXPECT_EQ(2, tasks_in_batch.size());
  EXPECT_TRUE(batch.empty());

  EXPECT_EQ(task0, tasks_in_batch[0].get());
  EXPECT_EQ(task1, tasks_in_batch[1].get());

  // RemoveAllTasks returns an empty vector from the second call onward, since
  // the batch is closed.
  EXPECT_THAT(batch.RemoveAllTasks(), ::testing::IsEmpty());  // second call
  EXPECT_THAT(batch.RemoveAllTasks(), ::testing::IsEmpty());  // third call
}

}  // namespace
}  // namespace serving
}  // namespace tensorflow