/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2_test"
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <utils/Vector.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>

#include "camera2_utils.h"
#include "TestExtensions.h"

namespace android {
namespace camera2 {
namespace tests {

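// Test fixture for the camera2 (HAL2) device interface. It loads the camera
// HAL module, tracks which camera IDs support the HAL2 API, and provides
// helpers for opening devices, wiring up the request/frame metadata queues,
// and connecting output streams.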
class Camera2Test: public testing::Test {
  public:
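    // Loads the camera HAL module, checks that it implements at least module
    // API 2.0, and records for each camera ID whether the device supports the
    // HAL2 (CAMERA_DEVICE_API_VERSION_2_0) interface.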
    void SetUpModule() {
        int res;

        hw_module_t *module = NULL;
        res = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                (const hw_module_t **)&module);

        ASSERT_EQ(0, res)
                << "Failure opening camera hardware module: " << res;
        ASSERT_TRUE(NULL != module)
                << "No camera module was set by hw_get_module";

        IF_ALOGV() {
            std::cout << "  Camera module name: "
                    << module->name << std::endl;
            std::cout << "  Camera module author: "
                    << module->author << std::endl;
            std::cout << "  Camera module API version: 0x" << std::hex
                    << module->module_api_version << std::endl;
            std::cout << "  Camera module HAL API version: 0x" << std::hex
                    << module->hal_api_version << std::endl;
        }

        int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
        ASSERT_LE(version2_0, module->module_api_version)
                << "Camera module version is 0x"
                << std::hex << module->module_api_version
                << ", should be at least 2.0. (0x"
                << std::hex << CAMERA_MODULE_API_VERSION_2_0 << ")";

        sCameraModule = reinterpret_cast<camera_module_t*>(module);

        sNumCameras = sCameraModule->get_number_of_cameras();
        ASSERT_LT(0, sNumCameras) << "No camera devices available!";

        IF_ALOGV() {
            std::cout << "  Camera device count: " << sNumCameras << std::endl;
        }

        sCameraSupportsHal2 = new bool[sNumCameras];

        for (int i = 0; i < sNumCameras; i++) {
            camera_info info;
            res = sCameraModule->get_camera_info(i, &info);
            ASSERT_EQ(0, res)
                    << "Failure getting camera info for camera " << i;
            IF_ALOGV() {
                std::cout << "  Camera device: " << std::dec
                          << i << std::endl;
                std::cout << "    Facing: " << std::dec
                          << info.facing << std::endl;
                std::cout << "    Orientation: " << std::dec
                          << info.orientation << std::endl;
                std::cout << "    Version: 0x" << std::hex <<
                        info.device_version << std::endl;
            }
            if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0) {
                sCameraSupportsHal2[i] = true;
                ASSERT_TRUE(NULL != info.static_camera_characteristics);
                IF_ALOGV() {
                    std::cout << "    Static camera metadata:" << std::endl;
                    dump_indented_camera_metadata(info.static_camera_characteristics,
                            0, 1, 6);
                }
            } else {
                sCameraSupportsHal2[i] = false;
            }
        }
    }

    void TearDownModule() {
        hw_module_t *module = reinterpret_cast<hw_module_t*>(sCameraModule);
        ASSERT_EQ(0, HWModuleHelpers::closeModule(module));
    }

    static const camera_module_t *getCameraModule() {
        return sCameraModule;
    }

    static int getNumCameras() {
        return sNumCameras;
    }

    static bool isHal2Supported(int id) {
        return sCameraSupportsHal2[id];
    }

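    // Opens the HAL2 device for the given camera ID via the module's open()
    // method; returns NULL if the ID is out of range, the device does not
    // support HAL2, or the open call fails.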
    static camera2_device_t *openCameraDevice(int id) {
        ALOGV("Opening camera %d", id);
        if (NULL == sCameraSupportsHal2) return NULL;
        if (id >= sNumCameras) return NULL;
        if (!sCameraSupportsHal2[id]) return NULL;

        hw_device_t *device = NULL;
        const camera_module_t *cam_module = getCameraModule();
        if (cam_module == NULL) {
            return NULL;
        }

        char camId[10];
        int res;

        snprintf(camId, 10, "%d", id);
        res = cam_module->common.methods->open(
            (const hw_module_t*)cam_module,
            camId,
            &device);
        if (res != NO_ERROR || device == NULL) {
            return NULL;
        }
        camera2_device_t *cam_device =
                reinterpret_cast<camera2_device_t*>(device);
        return cam_device;
    }

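    // Connects the metadata request/frame queues and the notification
    // listener to an opened device, and installs the HAL's vendor tag
    // operations so vendor-specific metadata tags can be handled.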
    static status_t configureCameraDevice(camera2_device_t *dev,
            MetadataQueue &requestQueue,
            MetadataQueue &frameQueue,
            NotifierListener &listener) {

        status_t err;

        err = dev->ops->set_request_queue_src_ops(dev,
                requestQueue.getToConsumerInterface());
        if (err != OK) return err;

        requestQueue.setFromConsumerInterface(dev);

        err = dev->ops->set_frame_queue_dst_ops(dev,
                frameQueue.getToProducerInterface());
        if (err != OK) return err;

        err = listener.getNotificationsFrom(dev);
        if (err != OK) return err;

        vendor_tag_query_ops_t *vendor_metadata_tag_ops;
        err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops);
        if (err != OK) return err;

        err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops);
        if (err != OK) return err;

        return OK;
    }

    static status_t closeCameraDevice(camera2_device_t *cam_dev) {
        int res;
        ALOGV("Closing camera %p", cam_dev);

        hw_device_t *dev = reinterpret_cast<hw_device_t *>(cam_dev);
        res = dev->close(dev);
        return res;
    }

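    // Opens camera `id`, caches its static metadata, and configures it with
    // the fixture's request/frame queues and notification listener. Any
    // previously opened device is closed first.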
    void setUpCamera(int id) {
        ASSERT_GT(sNumCameras, id);
        status_t res;

        mId = id;

        if (mDevice != NULL) {
            closeCameraDevice(mDevice);
        }
        mDevice = openCameraDevice(id);
        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";

        camera_info info;
        res = sCameraModule->get_camera_info(id, &info);
        ASSERT_EQ(OK, res);

        mStaticInfo = info.static_camera_characteristics;

        res = configureCameraDevice(mDevice,
                mRequests,
                mFrames,
                mNotifications);
        ASSERT_EQ(OK, res) << "Failure to configure camera device";
    }

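    // Wraps the given buffer producer in a StreamAdapter, connects it to the
    // open device with the requested size and format, and returns the
    // HAL-assigned stream ID through *id.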
    void setUpStream(sp<IGraphicBufferProducer> consumer,
            int width, int height, int format, int *id) {
        status_t res;

        StreamAdapter* stream = new StreamAdapter(consumer);

        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
        res = stream->connectToDevice(mDevice, width, height, format);
        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
                                 << strerror(-res);
        mStreams.push_back(stream);

        *id = stream->getId();
    }

    void disconnectStream(int id) {
        status_t res;
        unsigned int i = 0;
        for (; i < mStreams.size(); i++) {
            if (mStreams[i]->getId() == id) {
                res = mStreams[i]->disconnect();
                ASSERT_EQ(NO_ERROR, res) <<
                        "Failed to disconnect stream " << id;
                break;
            }
        }
        ASSERT_GT(mStreams.size(), i) << "Stream id not found: " << id;
    }

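    // Looks up the supported (width, height) pairs for the given pixel format
    // in the static metadata. RAW and JPEG (BLOB) formats use their dedicated
    // size tags; other formats use the processed sizes. For formats other
    // than IMPLEMENTATION_DEFINED, also asserts the format is advertised in
    // ANDROID_SCALER_AVAILABLE_FORMATS.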
    void getResolutionList(int32_t format,
            const int32_t **list,
            size_t *count) {
        ALOGV("Getting resolutions for format %x", format);
        status_t res;
        if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            camera_metadata_ro_entry_t availableFormats;
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_FORMATS,
                    &availableFormats);
            ASSERT_EQ(OK, res);

            uint32_t formatIdx;
            for (formatIdx = 0; formatIdx < availableFormats.count; formatIdx++) {
                if (availableFormats.data.i32[formatIdx] == format) break;
            }
            ASSERT_NE(availableFormats.count, formatIdx)
                << "No support found for format 0x" << std::hex << format;
        }

        camera_metadata_ro_entry_t availableSizes;
        if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                    &availableSizes);
        } else if (format == HAL_PIXEL_FORMAT_BLOB) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                    &availableSizes);
        } else {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                    &availableSizes);
        }
        ASSERT_EQ(OK, res);

        *list = availableSizes.data.i32;
        *count = availableSizes.count;
    }

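    // Polls the HAL until it reports no in-progress requests, sleeping 50 ms
    // between checks and giving up with TIMED_OUT after 10 seconds.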
    status_t waitUntilDrained() {
        static const uint32_t kSleepTime = 50000; // 50 ms
        static const uint32_t kMaxSleepTime = 10000000; // 10 s
        ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);

        // TODO: Set up notifications from HAL, instead of sleeping here
        uint32_t totalTime = 0;
        while (mDevice->ops->get_in_progress_count(mDevice) > 0) {
            usleep(kSleepTime);
            totalTime += kSleepTime;
            if (totalTime > kMaxSleepTime) {
                ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__,
                        totalTime, mDevice->ops->get_in_progress_count(mDevice));
                return TIMED_OUT;
            }
        }
        ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
        return OK;
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;

        SetUpModule();

        const ::testing::TestInfo* const testInfo =
                ::testing::UnitTest::GetInstance()->current_test_info();
        (void)testInfo;

        ALOGV("*** Starting test %s in test case %s", testInfo->name(),
              testInfo->test_case_name());
        mDevice = NULL;
    }

    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;

        for (unsigned int i = 0; i < mStreams.size(); i++) {
            delete mStreams[i];
        }
        if (mDevice != NULL) {
            closeCameraDevice(mDevice);
        }

        TearDownModule();
    }

    camera2_device    *mDevice;
    const camera_metadata_t *mStaticInfo;
    int                mId;

    MetadataQueue    mRequests;
    MetadataQueue    mFrames;
    NotifierListener mNotifications;

    Vector<StreamAdapter*> mStreams;

  private:
    static camera_module_t *sCameraModule;
    static int              sNumCameras;
    static bool            *sCameraSupportsHal2;
};

camera_module_t *Camera2Test::sCameraModule = NULL;
bool *Camera2Test::sCameraSupportsHal2      = NULL;
int Camera2Test::sNumCameras                = 0;

static const nsecs_t USEC = 1000;
static const nsecs_t MSEC = 1000*USEC;
static const nsecs_t SEC = 1000*MSEC;

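// Opens and then immediately closes every HAL2-capable camera device.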
TEST_F(Camera2Test, OpenClose) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        camera2_device_t *d = openCameraDevice(id);
        ASSERT_TRUE(NULL != d) << "Failed to open camera device";

        res = closeCameraDevice(d);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

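// Captures a single RAW_SENSOR frame on each HAL2-capable camera: sets up a
// CpuConsumer-backed raw stream, submits one capture request, and verifies
// that both the result metadata and the raw image buffer arrive.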
TEST_F(Camera2Test, Capture1Raw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t   rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(rawConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 2);
        }

        res = rawWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = rawConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
            ALOGV("Dumping raw buffer to %s", dumpname);
            // Write to file
            std::ofstream rawFile(dumpname);
            size_t bpp = 2;
            for (unsigned int y = 0; y < buffer.height; y++) {
                rawFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            rawFile.close();
        }

        res = rawConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

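// Queues a burst of ten raw capture requests with exposure times doubling
// from 100 us, then dequeues each result frame and image buffer in order and
// checks the reported frame counts.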
TEST_F(Camera2Test, CaptureBurstRaw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> rawConsumer = new CpuConsumer(1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t    rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount <= 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((uint32_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(rawConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request template: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        int numCaptures = 10;

        // Enqueue numCaptures requests with increasing exposure time

        uint64_t exposureTime = 100 * USEC;
        for (int reqCount = 0; reqCount < numCaptures; reqCount++) {
            camera_metadata_t *req;
            req = allocate_camera_metadata(20, 2000);
            append_camera_metadata(req, request);

            add_camera_metadata_entry(req,
                    ANDROID_SENSOR_EXPOSURE_TIME,
                    (void**)&exposureTime, 1);
            exposureTime *= 2;

            res = mRequests.enqueue(req);
            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
                    << strerror(-res);
        }

        // Get frames and image buffers one by one
        uint64_t expectedExposureTime = 100 * USEC;
        for (int frameCount = 0; frameCount < 10; frameCount++) {
            res = mFrames.waitForBuffer(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

            camera_metadata_t *frame;
            res = mFrames.dequeue(&frame);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_TRUE(frame != NULL);

            camera_metadata_entry_t frameNumber;
            res = find_camera_metadata_entry(frame,
                    ANDROID_REQUEST_FRAME_COUNT,
                    &frameNumber);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_EQ(frameCount, *frameNumber.data.i32);

            res = rawWaiter->waitForFrame(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) <<
                    "Never got raw data for capture " << frameCount;

            CpuConsumer::LockedBuffer buffer;
            res = rawConsumer->lockNextBuffer(&buffer);
            ASSERT_EQ(NO_ERROR, res);

            IF_ALOGV() {
                char dumpname[60];
                snprintf(dumpname, 60,
                        "/data/local/tmp/camera2_test-"
                        "captureBurstRaw-dump_%d.raw",
                        frameCount);
                ALOGV("Dumping raw buffer to %s", dumpname);
                // Write to file
                std::ofstream rawFile(dumpname);
                for (unsigned int y = 0; y < buffer.height; y++) {
                    rawFile.write(
                            (const char *)(buffer.data + y * buffer.stride * 2),
                            buffer.width * 2);
                }
                rawFile.close();
            }

            res = rawConsumer->unlockBuffer(buffer);
            ASSERT_EQ(NO_ERROR, res);

            expectedExposureTime *= 2;
        }
    }
}

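// Asks the HAL to construct a default request for every template type from
// CAMERA2_TEMPLATE_PREVIEW up to CAMERA2_TEMPLATE_COUNT and checks that each
// comes back non-NULL and non-empty.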
TEST_F(Camera2Test, ConstructDefaultRequests) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT;
             i++) {
            camera_metadata_t *request = NULL;
            res = mDevice->ops->construct_default_request(mDevice,
                    i,
                    &request);
            EXPECT_EQ(NO_ERROR, res) <<
                    "Unable to construct request from template type " << i;
            EXPECT_TRUE(request != NULL);
            EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request));
            EXPECT_LT((size_t)0, get_camera_metadata_data_count(request));

            IF_ALOGV() {
                std::cout << "  ** Template type " << i << ":" << std::endl;
                dump_indented_camera_metadata(request, 0, 2, 4);
            }

            free_camera_metadata(request);
        }
    }
}

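// Captures a single JPEG (HAL_PIXEL_FORMAT_BLOB) frame on each HAL2-capable
// camera, mirroring Capture1Raw but using the JPEG stream and size list.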
TEST_F(Camera2Test, Capture1Jpeg) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<CpuConsumer> jpegConsumer = new CpuConsumer(1);
        sp<FrameWaiter> jpegWaiter = new FrameWaiter();
        jpegConsumer->setFrameAvailableListener(jpegWaiter);

        const int32_t *jpegResolutions;
        size_t   jpegResolutionsCount;

        int format = HAL_PIXEL_FORMAT_BLOB;

        getResolutionList(format,
                &jpegResolutions, &jpegResolutionsCount);
        ASSERT_LT((size_t)0, jpegResolutionsCount);

        // Pick first available JPEG resolution
        int width = jpegResolutions[0];
        int height = jpegResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(jpegConsumer->getProducerInterface(),
                    width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 4);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 4);
        }

        res = jpegWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = jpegConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg";
            ALOGV("Dumping JPEG buffer to %s", dumpname);
            // Write to file
            std::ofstream jpegFile(dumpname);
            size_t bpp = 1;
            for (unsigned int y = 0; y < buffer.height; y++) {
                jpegFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            jpegFile.close();
        }

        res = jpegConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

} // namespace tests
} // namespace camera2
} // namespace android