/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2_test"
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <utils/Vector.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>

#include "camera2_utils.h"
#include "TestExtensions.h"

namespace android {
namespace camera2 {
namespace tests {

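/**
 * Test fixture for the camera2 (HAL2) device HAL. Loads the camera HAL
 * module, enumerates the available cameras, and provides helpers to open
 * HAL2-capable devices and to wire up the request queue, frame queue, and
 * notification listener used by the capture tests below.
 */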
class Camera2Test: public testing::Test {
  public:
    void SetUpModule() {
        int res;

        hw_module_t *module = NULL;
        res = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                (const hw_module_t **)&module);

        ASSERT_EQ(0, res)
                << "Failure opening camera hardware module: " << res;
        ASSERT_TRUE(NULL != module)
                << "No camera module was set by hw_get_module";

        IF_ALOGV() {
            std::cout << "  Camera module name: "
                    << module->name << std::endl;
            std::cout << "  Camera module author: "
                    << module->author << std::endl;
            std::cout << "  Camera module API version: 0x" << std::hex
                    << module->module_api_version << std::endl;
            std::cout << "  Camera module HAL API version: 0x" << std::hex
                    << module->hal_api_version << std::endl;
        }

        int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
        ASSERT_LE(version2_0, module->module_api_version)
                << "Camera module version is 0x"
                << std::hex << module->module_api_version
                << ", should be at least 2.0. (0x"
                << std::hex << CAMERA_MODULE_API_VERSION_2_0 << ")";

        sCameraModule = reinterpret_cast<camera_module_t*>(module);

        sNumCameras = sCameraModule->get_number_of_cameras();
        ASSERT_LT(0, sNumCameras) << "No camera devices available!";

        IF_ALOGV() {
            std::cout << "  Camera device count: " << sNumCameras << std::endl;
        }

        sCameraSupportsHal2 = new bool[sNumCameras];

        for (int i = 0; i < sNumCameras; i++) {
            camera_info info;
            res = sCameraModule->get_camera_info(i, &info);
            ASSERT_EQ(0, res)
                    << "Failure getting camera info for camera " << i;
            IF_ALOGV() {
                std::cout << "  Camera device: " << std::dec
                          << i << std::endl;
                std::cout << "    Facing: " << std::dec
                          << info.facing  << std::endl;
                std::cout << "    Orientation: " << std::dec
                          << info.orientation  << std::endl;
                std::cout << "    Version: 0x" << std::hex <<
                        info.device_version  << std::endl;
            }
            if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0 &&
                    info.device_version < CAMERA_DEVICE_API_VERSION_3_0) {
                sCameraSupportsHal2[i] = true;
                ASSERT_TRUE(NULL != info.static_camera_characteristics);
                IF_ALOGV() {
                    std::cout << "    Static camera metadata:"  << std::endl;
                    dump_indented_camera_metadata(info.static_camera_characteristics,
                            0, 1, 6);
                }
            } else {
                sCameraSupportsHal2[i] = false;
            }
        }
    }

    void TearDownModule() {
        hw_module_t *module = reinterpret_cast<hw_module_t*>(sCameraModule);
        ASSERT_EQ(0, HWModuleHelpers::closeModule(module));
    }

    static const camera_module_t *getCameraModule() {
        return sCameraModule;
    }

    static int getNumCameras() {
        return sNumCameras;
    }

    static bool isHal2Supported(int id) {
        return sCameraSupportsHal2[id];
    }

    static camera2_device_t *openCameraDevice(int id) {
        ALOGV("Opening camera %d", id);
        if (NULL == sCameraSupportsHal2) return NULL;
        if (id >= sNumCameras) return NULL;
        if (!sCameraSupportsHal2[id]) return NULL;

        hw_device_t *device = NULL;
        const camera_module_t *cam_module = getCameraModule();
        if (cam_module == NULL) {
            return NULL;
        }

        char camId[10];
        int res;

        snprintf(camId, 10, "%d", id);
        res = cam_module->common.methods->open(
            (const hw_module_t*)cam_module,
            camId,
            &device);
        if (res != NO_ERROR || device == NULL) {
            return NULL;
        }
        camera2_device_t *cam_device =
                reinterpret_cast<camera2_device_t*>(device);
        return cam_device;
    }

    static status_t configureCameraDevice(camera2_device_t *dev,
            MetadataQueue &requestQueue,
            MetadataQueue &frameQueue,
            NotifierListener &listener) {

        status_t err;

        err = dev->ops->set_request_queue_src_ops(dev,
                requestQueue.getToConsumerInterface());
        if (err != OK) return err;

        requestQueue.setFromConsumerInterface(dev);

        err = dev->ops->set_frame_queue_dst_ops(dev,
                frameQueue.getToProducerInterface());
        if (err != OK) return err;

        err = listener.getNotificationsFrom(dev);
        if (err != OK) return err;

        return OK;
    }

    static status_t closeCameraDevice(camera2_device_t **cam_dev) {
        int res;
        if (*cam_dev == NULL) return OK;

        ALOGV("Closing camera %p", *cam_dev);

        hw_device_t *dev = reinterpret_cast<hw_device_t *>(*cam_dev);
        res = dev->close(dev);
        *cam_dev = NULL;
        return res;
    }

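    // Opens camera `id` and connects this fixture's request queue, frame
    // queue, and notification listener to the newly opened device.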
    void setUpCamera(int id) {
        ASSERT_GT(sNumCameras, id);
        status_t res;

        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }
        mId = id;
        mDevice = openCameraDevice(mId);
        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";

        camera_info info;
        res = sCameraModule->get_camera_info(id, &info);
        ASSERT_EQ(OK, res);

        mStaticInfo = info.static_camera_characteristics;

        res = configureCameraDevice(mDevice,
                mRequests,
                mFrames,
                mNotifications);
        ASSERT_EQ(OK, res) << "Failure to configure camera device";
    }

    void setUpStream(sp<IGraphicBufferProducer> consumer,
            int width, int height, int format, int *id) {
        status_t res;

        StreamAdapter* stream = new StreamAdapter(consumer);

        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
        res = stream->connectToDevice(mDevice, width, height, format);
        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
                                 << strerror(-res);
        mStreams.push_back(stream);

        *id = stream->getId();
    }

    void disconnectStream(int id) {
        status_t res;
        unsigned int i = 0;
        for (; i < mStreams.size(); i++) {
            if (mStreams[i]->getId() == id) {
                res = mStreams[i]->disconnect();
                ASSERT_EQ(NO_ERROR, res) <<
                        "Failed to disconnect stream " << id;
                break;
            }
        }
        ASSERT_GT(mStreams.size(), i) << "Stream id not found:" << id;
    }

    void getResolutionList(int32_t format,
            const int32_t **list,
            size_t *count) {
        ALOGV("Getting resolutions for format %x", format);
        status_t res;
        if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            camera_metadata_ro_entry_t availableFormats;
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_FORMATS,
                    &availableFormats);
            ASSERT_EQ(OK, res);

            uint32_t formatIdx;
            for (formatIdx = 0; formatIdx < availableFormats.count; formatIdx++) {
                if (availableFormats.data.i32[formatIdx] == format) break;
            }
            ASSERT_NE(availableFormats.count, formatIdx)
                << "No support found for format 0x" << std::hex << format;
        }

        camera_metadata_ro_entry_t availableSizes;
        if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                    &availableSizes);
        } else if (format == HAL_PIXEL_FORMAT_BLOB) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                    &availableSizes);
        } else {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                    &availableSizes);
        }
        ASSERT_EQ(OK, res);

        *list = availableSizes.data.i32;
        *count = availableSizes.count;
    }

    status_t waitUntilDrained() {
        static const uint32_t kSleepTime = 50000; // 50 ms
        static const uint32_t kMaxSleepTime = 10000000; // 10 s
        ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);

        // TODO: Set up notifications from HAL, instead of sleeping here
        uint32_t totalTime = 0;
        while (mDevice->ops->get_in_progress_count(mDevice) > 0) {
            usleep(kSleepTime);
            totalTime += kSleepTime;
            if (totalTime > kMaxSleepTime) {
                ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__,
                        totalTime, mDevice->ops->get_in_progress_count(mDevice));
                return TIMED_OUT;
            }
        }
        ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
        return OK;
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;

        SetUpModule();

        const ::testing::TestInfo* const testInfo =
                ::testing::UnitTest::GetInstance()->current_test_info();
        (void)testInfo;

        ALOGV("*** Starting test %s in test case %s", testInfo->name(),
              testInfo->test_case_name());
        mDevice = NULL;
    }

    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;

        for (unsigned int i = 0; i < mStreams.size(); i++) {
            delete mStreams[i];
        }
        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }

        TearDownModule();
    }

    int mId;
    camera2_device    *mDevice;
    const camera_metadata_t *mStaticInfo;

    MetadataQueue    mRequests;
    MetadataQueue    mFrames;
    NotifierListener mNotifications;

    Vector<StreamAdapter*> mStreams;

  private:
    static camera_module_t *sCameraModule;
    static int              sNumCameras;
    static bool            *sCameraSupportsHal2;
};

camera_module_t *Camera2Test::sCameraModule = NULL;
bool *Camera2Test::sCameraSupportsHal2      = NULL;
int Camera2Test::sNumCameras                = 0;

static const nsecs_t USEC = 1000;
static const nsecs_t MSEC = 1000*USEC;
static const nsecs_t SEC = 1000*MSEC;

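// Opens and then closes each HAL2-capable camera device, one at a time.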
TEST_F(Camera2Test, OpenClose) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        camera2_device_t *d = openCameraDevice(id);
        ASSERT_TRUE(NULL != d) << "Failed to open camera device";

        res = closeCameraDevice(&d);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

TEST_F(Camera2Test, Capture1Raw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t   rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 2);
        }

        res = rawWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = rawConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
            ALOGV("Dumping raw buffer to %s", dumpname);
            // Write to file
            std::ofstream rawFile(dumpname);
            size_t bpp = 2;
            for (unsigned int y = 0; y < buffer.height; y++) {
                rawFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            rawFile.close();
        }

        res = rawConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

TEST_F(Camera2Test, CaptureBurstRaw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t    rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((uint32_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request template: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        int numCaptures = 10;

        // Enqueue numCaptures requests with increasing exposure time

        uint64_t exposureTime = 100 * USEC;
        for (int reqCount = 0; reqCount < numCaptures; reqCount++) {
            camera_metadata_t *req;
            req = allocate_camera_metadata(20, 2000);
            append_camera_metadata(req, request);

            add_camera_metadata_entry(req,
                    ANDROID_SENSOR_EXPOSURE_TIME,
                    (void**)&exposureTime, 1);
            exposureTime *= 2;

            res = mRequests.enqueue(req);
            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
                    << strerror(-res);
        }

        // Get frames and image buffers one by one
        uint64_t expectedExposureTime = 100 * USEC;
        for (int frameCount = 0; frameCount < numCaptures; frameCount++) {
            res = mFrames.waitForBuffer(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

            camera_metadata_t *frame;
            res = mFrames.dequeue(&frame);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_TRUE(frame != NULL);

            camera_metadata_entry_t frameNumber;
            res = find_camera_metadata_entry(frame,
                    ANDROID_REQUEST_FRAME_COUNT,
                    &frameNumber);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_EQ(frameCount, *frameNumber.data.i32);

            res = rawWaiter->waitForFrame(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) <<
                    "Never got raw data for capture " << frameCount;

            CpuConsumer::LockedBuffer buffer;
            res = rawConsumer->lockNextBuffer(&buffer);
            ASSERT_EQ(NO_ERROR, res);

            IF_ALOGV() {
                char dumpname[60];
                snprintf(dumpname, 60,
                        "/data/local/tmp/camera2_test-"
                        "captureBurstRaw-dump_%d.raw",
                        frameCount);
                ALOGV("Dumping raw buffer to %s", dumpname);
                // Write to file
                std::ofstream rawFile(dumpname);
                for (unsigned int y = 0; y < buffer.height; y++) {
                    rawFile.write(
                            (const char *)(buffer.data + y * buffer.stride * 2),
                            buffer.width * 2);
                }
                rawFile.close();
            }

            res = rawConsumer->unlockBuffer(buffer);
            ASSERT_EQ(NO_ERROR, res);

            expectedExposureTime *= 2;
        }
    }
}

TEST_F(Camera2Test, ConstructDefaultRequests) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT;
             i++) {
            camera_metadata_t *request = NULL;
            res = mDevice->ops->construct_default_request(mDevice,
                    i,
                    &request);
            EXPECT_EQ(NO_ERROR, res) <<
                    "Unable to construct request from template type " << i;
            EXPECT_TRUE(request != NULL);
            EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request));
            EXPECT_LT((size_t)0, get_camera_metadata_data_count(request));

            IF_ALOGV() {
                std::cout << "  ** Template type " << i << ":" << std::endl;
                dump_indented_camera_metadata(request, 0, 2, 4);
            }

            free_camera_metadata(request);
        }
    }
}

TEST_F(Camera2Test, Capture1Jpeg) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<IGraphicBufferProducer> bqProducer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
        sp<CpuConsumer> jpegConsumer = new CpuConsumer(bqConsumer, 1);
        sp<FrameWaiter> jpegWaiter = new FrameWaiter();
        jpegConsumer->setFrameAvailableListener(jpegWaiter);

        const int32_t *jpegResolutions;
        size_t   jpegResolutionsCount;

        int format = HAL_PIXEL_FORMAT_BLOB;

        getResolutionList(format,
                &jpegResolutions, &jpegResolutionsCount);
        ASSERT_LT((size_t)0, jpegResolutionsCount);

        // Pick first available JPEG resolution
        int width = jpegResolutions[0];
        int height = jpegResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bqProducer, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 4);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 4);
        }

        res = jpegWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = jpegConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg";
            ALOGV("Dumping JPEG buffer to %s", dumpname);
            // Write to file
            std::ofstream jpegFile(dumpname);
            size_t bpp = 1;
            for (unsigned int y = 0; y < buffer.height; y++) {
                jpegFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            jpegFile.close();
        }

        res = jpegConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

} // namespace tests
} // namespace camera2
} // namespace android