/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__
#define __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__

#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <gui/CpuConsumer.h>
#include <gui/Surface.h>
#include <utils/Condition.h>
#include <utils/Mutex.h>
#include <system/camera_metadata.h>

#include "CameraModuleFixture.h"
#include "TestExtensions.h"

#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )

namespace android {
namespace camera2 {
namespace tests {

// Sentinel format value: pick the best format for CPU reads for the given
// device version (see MapAutoFormat()).
#define CAMERA_STREAM_AUTO_CPU_FORMAT (-1)

struct CameraStreamParams;

void PrintTo(const CameraStreamParams& p, ::std::ostream* os);

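// Parameters for a single CPU-readable stream: the pixel format to request
// (or CAMERA_STREAM_AUTO_CPU_FORMAT to let MapAutoFormat() choose one) and
// the buffer count handed to the CpuConsumer.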
struct CameraStreamParams {
    int mFormat;
    int mHeapCount;
};

inline ::std::ostream& operator<<(::std::ostream& os, const CameraStreamParams &p) {
    PrintTo(p, &os);
    return os;
}

inline void PrintTo(const CameraStreamParams& p, ::std::ostream* os) {
    char fmt[100];
    camera_metadata_enum_snprint(
        ANDROID_SCALER_AVAILABLE_FORMATS, p.mFormat, fmt, sizeof(fmt));

    *os << "{ ";
    *os << "Format: 0x" << std::hex << p.mFormat << ", ";
    *os << "Format name: " << fmt << ", ";
    *os << "HeapCount: " << p.mHeapCount;
    *os << " }";
}

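// Test fixture that layers a single CPU-readable output stream on top of
// CameraModuleFixture. A minimal usage sketch (hypothetical subclass; real
// tests may also be value-parameterized over CameraStreamParams):
//
//     struct MyStreamTest : public CameraStreamFixture {
//         static CameraStreamParams MakeParams() {
//             CameraStreamParams p;
//             p.mFormat = CAMERA_STREAM_AUTO_CPU_FORMAT;
//             p.mHeapCount = 2;
//             return p;
//         }
//         MyStreamTest() : CameraStreamFixture(MakeParams()) {}
//     };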
class CameraStreamFixture
    : public CameraModuleFixture</*InfoQuirk*/true> {

public:
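    // Note: SetUp()/TearDown() run from the constructor/destructor rather
    // than through gtest's fixture hooks, so derived fixtures already have an
    // initialized device by the time their own SetUp() executes.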
    CameraStreamFixture(CameraStreamParams p)
    : CameraModuleFixture(TestSettings::DeviceId()) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        mParam = p;

        SetUp();
    }

    ~CameraStreamFixture() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;

        TearDown();
    }

private:

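    // Brings up the camera module/device and picks the first entry of
    // android.scaler.availableProcessedSizes as the default stream size.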
    void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;

        CameraModuleFixture::SetUp();

        sp<CameraDeviceBase> device = mDevice;

        /* use an arbitrary w,h */
        {
            const int tag = ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES;

            const CameraMetadata& staticInfo = device->info();
            camera_metadata_ro_entry entry = staticInfo.find(tag);
            ASSERT_NE(0u, entry.count)
                << "Missing tag android.scaler.availableProcessedSizes";

            ASSERT_LE(2u, entry.count);
            /* this seems like it would always be the smallest w,h
               but we actually make no contract that it's sorted asc */
            mWidth = entry.data.i32[0];
            mHeight = entry.data.i32[1];
        }
    }
    void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;

        // important: shut down HAL before releasing streams
        CameraModuleFixture::TearDown();

        mNativeWindow.clear();
        mCpuConsumer.clear();
        mFrameListener.clear();
    }

protected:
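    // Frame-available listener attached to the CpuConsumer in CreateStream();
    // counts pending frames so tests can block in waitForFrame().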
    struct FrameListener : public ConsumerBase::FrameAvailableListener {

        FrameListener() {
            mPendingFrames = 0;
        }

        // CpuConsumer::FrameAvailableListener implementation
        virtual void onFrameAvailable() {
            ALOGV("Frame now available (start)");

            Mutex::Autolock lock(mMutex);
            mPendingFrames++;
            mCondition.signal();

            ALOGV("Frame now available (end)");
        }

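        // Blocks until at least one frame is pending or waitRelative() times
        // out; consumes one pending frame on success.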
        status_t waitForFrame(nsecs_t timeout) {
            status_t res;
            Mutex::Autolock lock(mMutex);
            while (mPendingFrames == 0) {
                res = mCondition.waitRelative(mMutex, timeout);
                if (res != OK) return res;
            }
            mPendingFrames--;
            return OK;
        }

      private:
        Mutex mMutex;
        Condition mCondition;
        int mPendingFrames;
    };

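    // Creates a CpuConsumer-backed output stream of mWidth x mHeight on the
    // device and registers a FrameListener for incoming frames.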
    void CreateStream() {
        sp<CameraDeviceBase> device = mDevice;
        CameraStreamParams p = mParam;

        sp<BufferQueue> bq = new BufferQueue();
        mCpuConsumer = new CpuConsumer(bq, p.mHeapCount);
        mCpuConsumer->setName(String8("CameraStreamTest::mCpuConsumer"));

        mNativeWindow = new Surface(bq);

        int format = MapAutoFormat(p.mFormat);

        ASSERT_EQ(OK,
            device->createStream(mNativeWindow,
                mWidth, mHeight, format, /*size (for jpegs)*/0,
                &mStreamId));

        ASSERT_NE(-1, mStreamId);

        // do not make 'this' a FrameListener or the lifetime policy will clash
        mFrameListener = new FrameListener();
        mCpuConsumer->setFrameAvailableListener(mFrameListener);
    }

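    // Removes the stream created by CreateStream() from the device.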
    void DeleteStream() {
        ASSERT_EQ(OK, mDevice->deleteStream(mStreamId));
    }

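    // Resolves CAMERA_STREAM_AUTO_CPU_FORMAT to a concrete pixel format:
    // flexible YUV (YCbCr_420_888) for device HAL v3.0 and later, NV21
    // (YCrCb_420_SP) otherwise. Other formats pass through unchanged.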
    int MapAutoFormat(int format) {
        if (format == CAMERA_STREAM_AUTO_CPU_FORMAT) {
            if (getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
                format = HAL_PIXEL_FORMAT_YCbCr_420_888;
            } else {
                format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
            }
        }
        return format;
    }

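    // Dumps a locked YUV buffer to fileName as planar data: the full Y plane
    // followed by the subsampled Cb and Cr planes. Handles flexible YUV
    // (YCbCr_420_888), NV21 (YCrCb_420_SP) and YV12 source layouts.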
    void DumpYuvToFile(const String8 &fileName, const CpuConsumer::LockedBuffer &img) {
        uint8_t *dataCb, *dataCr;
        uint32_t stride;
        uint32_t chromaStride;
        uint32_t chromaStep;

        switch (img.format) {
            case HAL_PIXEL_FORMAT_YCbCr_420_888:
                stride = img.stride;
                chromaStride = img.chromaStride;
                chromaStep = img.chromaStep;
                dataCb = img.dataCb;
                dataCr = img.dataCr;
                break;
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
                stride = img.width;
                chromaStride = img.width;
                chromaStep = 2;
                dataCr = img.data + img.width * img.height;
                dataCb = dataCr + 1;
                break;
            case HAL_PIXEL_FORMAT_YV12:
                stride = img.stride;
                chromaStride = ALIGN(img.width / 2, 16);
                chromaStep = 1;
                dataCr = img.data + img.stride * img.height;
                dataCb = dataCr + chromaStride * img.height / 2;
                break;
            default:
                ALOGE("Unknown format %d, not dumping", img.format);
                return;
        }

        FILE *yuvFile = fopen(fileName.string(), "w");
        if (yuvFile == NULL) {
            ALOGE("Unable to open file %s for writing", fileName.string());
            return;
        }

        size_t bytes;

        // Write Y
        for (size_t y = 0; y < img.height; ++y) {
            bytes = fwrite(
                reinterpret_cast<const char*>(img.data + stride * y),
                1, img.width, yuvFile);
            if (bytes != img.width) {
                ALOGE("Unable to write to file %s", fileName.string());
                fclose(yuvFile);
                return;
            }
        }

        // Write Cb/Cr
        uint8_t *src = dataCb;
        for (int c = 0; c < 2; ++c) {
            for (size_t y = 0; y < img.height / 2; ++y) {
                uint8_t *px = src + y * chromaStride;
                if (chromaStep != 1) {
                    for (size_t x = 0; x < img.width / 2; ++x) {
                        fputc(*px, yuvFile);
                        px += chromaStep;
                    }
                } else {
                    bytes = fwrite(reinterpret_cast<const char*>(px),
                            1, img.width / 2, yuvFile);
                    if (bytes != img.width / 2) {
                        ALOGE("Unable to write to file %s", fileName.string());
                        fclose(yuvFile);
                        return;
                    }
                }
            }
            src = dataCr;
        }
        fclose(yuvFile);
    }

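    // Stream dimensions chosen in SetUp() and the id of the stream created by
    // CreateStream().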
    int mWidth;
    int mHeight;

    int mStreamId;

    android::sp<FrameListener> mFrameListener;
    android::sp<CpuConsumer> mCpuConsumer;
    android::sp<ANativeWindow> mNativeWindow;

private:
    CameraStreamParams mParam;
};

} // namespace tests
} // namespace camera2
} // namespace android

#endif