/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <inttypes.h>

#define LOG_TAG "CameraBurstTest"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Timers.h>

#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <sstream>

#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#define CAMERA_FRAME_TIMEOUT    1000000000LL // nsecs (1 sec)
#define CAMERA_HEAP_COUNT       2  // HALBUG: 1 means registerBuffers fails
#define CAMERA_BURST_DEBUGGING  0
#define CAMERA_FRAME_BURST_COUNT 10

/* constants for the exposure test */
#define CAMERA_EXPOSURE_DOUBLE  2
#define CAMERA_EXPOSURE_DOUBLING_THRESHOLD 1.0f
#define CAMERA_EXPOSURE_DOUBLING_COUNT 4
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_EXPOSURE_STARTING 100000 // 1/10 ms, up to 51.2 ms with 10 steps

#define USEC 1000LL        // in ns
#define MSEC 1000000LL     // in ns
#define SEC  1000000000LL  // in ns

#if CAMERA_BURST_DEBUGGING
#define dout std::cout
#else
#define dout if (0) std::cout
#endif

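// Warning-only check macros: stream the message to std::cerr when the
// condition does not hold, or to a no-op null-buffer ostream when it does.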
#define WARN_UNLESS(condition) (!(condition) ? (std::cerr) : (std::ostream(NULL)) << "Warning: ")
#define WARN_LE(exp, act) WARN_UNLESS((exp) <= (act))
#define WARN_LT(exp, act) WARN_UNLESS((exp) < (act))
#define WARN_GT(exp, act) WARN_UNLESS((exp) > (act))

using namespace android;
using namespace android::camera2;

namespace android {
namespace camera2 {
namespace tests {

static CameraStreamParams STREAM_PARAMETERS = {
    /*mFormat*/    CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/ CAMERA_HEAP_COUNT
};

class CameraBurstTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraBurstTest() : CameraStreamFixture(STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }

        CreateStream();
    }

    ~CameraBurstTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;

        if (mDevice.get()) {
            mDevice->waitUntilDrained();
        }
        DeleteStream();
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;
    }
    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;
    }

    /* this assumes the format is YUV420sp or flexible YUV */
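    // Returns the sum of Y-plane values, skipping fully black (0) and fully
    // saturated (255) pixels; the counts of those clipped pixels are reported
    // through *underexposed and *overexposed.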
    long long TotalBrightness(const CpuConsumer::LockedBuffer& imgBuffer,
                              int *underexposed,
                              int *overexposed) const {

        const uint8_t* buf = imgBuffer.data;
        size_t stride = imgBuffer.stride;

        /* iterate over the Y plane only */
        long long acc = 0;

        *underexposed = 0;
        *overexposed = 0;

        for (size_t y = 0; y < imgBuffer.height; ++y) {
            for (size_t x = 0; x < imgBuffer.width; ++x) {
                const uint8_t p = buf[y * stride + x];

                if (p == 0) {
                    if (underexposed) {
                        ++*underexposed;
                    }
                    continue;
                } else if (p == 255) {
                    if (overexposed) {
                        ++*overexposed;
                    }
                    continue;
                }

                acc += p;
            }
        }

        return acc;
    }

    // Parses a comma-separated string list into a Vector
    template<typename T>
    void ParseList(const char *src, Vector<T> &list) {
        std::istringstream s(src);
        while (!s.eof()) {
            char c = s.peek();
            if (c == ',' || c == ' ') {
                s.ignore(1, EOF);
                continue;
            }
            T val;
            s >> val;
            list.push_back(val);
        }
    }

};

TEST_F(CameraBurstTest, ManualExposureControl) {

    TEST_EXTENSION_FORKING_INIT;
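    // Strategy: with all 3A disabled, submit a burst where the requested
    // exposure time doubles on each frame, then check that the measured
    // average Y brightness roughly doubles across consecutive frames.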

    // Range of valid exposure times, in nanoseconds
    int64_t minExp, maxExp;
    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        ASSERT_EQ(2u, exposureTimeRange.count);
        minExp = exposureTimeRange.data.i64[0];
        maxExp = exposureTimeRange.data.i64[1];
    }

    dout << "Min exposure is " << minExp;
    dout << " max exposure is " << maxExp << std::endl;

    // Calculate some set of valid exposure times for each request
    int64_t exposures[CAMERA_FRAME_BURST_COUNT];
    exposures[0] = CAMERA_EXPOSURE_STARTING;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        exposures[i] = exposures[i-1] * CAMERA_EXPOSURE_DOUBLE;
    }
    // Our calculated exposure times should be in [minExp, maxExp]
    EXPECT_LE(minExp, exposures[0])
        << "Minimum exposure range is too high, wanted at most "
        << exposures[0] << "ns";
    EXPECT_GE(maxExp, exposures[CAMERA_FRAME_BURST_COUNT-1])
        << "Maximum exposure range is too low, wanted at least "
        << exposures[CAMERA_FRAME_BURST_COUNT-1] << "ns";

    // Create a preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));

        if (CAMERA_BURST_DEBUGGING) {
            int frameCount = 0;
            ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_FRAME_COUNT,
                                                &frameCount, 1));
        }
    }

    if (CAMERA_BURST_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Submit capture requests
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &exposures[i], 1));
        ALOGV("Submitting capture request %d with exposure %" PRId64, i,
              exposures[i]);
        dout << "Capture request " << i << " exposure is "
             << (exposures[i]/1e6f) << std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    dout << "Buffer dimensions " << mWidth << "x" << mHeight << std::endl;

    float brightnesses[CAMERA_FRAME_BURST_COUNT];
    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        ALOGV("Reading capture request %d with exposure %" PRId64, i, exposures[i]);
        ASSERT_EQ(OK, mDevice->waitForNextFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("Reading capture request-1 %d", i);
        CaptureResult result;
        ASSERT_EQ(OK, mDevice->getNextResult(&result));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        float avgBrightness = brightness * 1.0f /
                (mWidth * mHeight - (underexposed + overexposed));
        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));

        brightnesses[i] = avgBrightness;
    }

    // Calculate max consecutive frame exposure doubling
    float prev = brightnesses[0];
    int doubling_count = 1;
    int max_doubling_count = 0;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        if (fabs(brightnesses[i] - prev*CAMERA_EXPOSURE_DOUBLE)
                <= CAMERA_EXPOSURE_DOUBLING_THRESHOLD) {
            doubling_count++;
        }
        else {
            max_doubling_count = std::max(max_doubling_count, doubling_count);
            doubling_count = 1;
        }
        prev = brightnesses[i];
    }
    // Account for a doubling run that extends through the last frame
    max_doubling_count = std::max(max_doubling_count, doubling_count);

    dout << "max doubling count: " << max_doubling_count << std::endl;

    /**
     * Make this check warning-only, since the brightness calculation is not
     * reliable and we have a separate test to cover this case. Plus it is
     * pretty subtle to get right without complicating the test too much.
     */
    WARN_LE(CAMERA_EXPOSURE_DOUBLING_COUNT, max_doubling_count)
        << "average brightness should double at least "
        << CAMERA_EXPOSURE_DOUBLING_COUNT
        << " times over each consecutive frame as the exposure is doubled"
        << std::endl;
}

/**
 * This test varies exposure time, frame duration, and sensitivity for a
 * burst of captures. It picks values by default, but the selection can be
 * overridden with the environment variables
 *   CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES
 *   CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS
 *   CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES
 * which must all be lists of comma-separated values, and each list must be
 * the same length. In addition, if the environment variable
 *   CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES
 * is set to 1, then the YUV buffers are dumped into files named
 * "camera2_test_variable_burst_frame_NNN.yuv"
 *
 * For example:
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES 10000000,20000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS 40000000,40000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES 200,100
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES 1
 *   $ /data/nativetest/camera2_test/camera2_test --gtest_filter="*VariableBurst"
 */
// Disabled for now, as usage of the deprecated tags needs to be cleaned up quite a bit.
TEST_F(CameraBurstTest, DISABLED_VariableBurst) {

    TEST_EXTENSION_FORKING_INIT;

    // Slack bounds used when checking that frame durations are within range
    const nsecs_t DURATION_UPPER_BOUND = 10 * MSEC;
    const nsecs_t DURATION_LOWER_BOUND = 20 * MSEC;

    // Threshold for considering two captures to have equivalent exposure value,
    // as a ratio of the smaller EV to the larger EV.
    const float EV_MATCH_BOUND = 0.95;
    // Bound for two captures with equivalent exposure values to have the same
    // measured brightness, in 0-255 luminance.
    const float BRIGHTNESS_MATCH_BOUND = 5;

    // Environment variables to look for to override test settings
    const char *expEnv         = "CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES";
    const char *durationEnv    = "CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS";
    const char *sensitivityEnv = "CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES";
    const char *dumpFrameEnv   = "CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES";

    // Range of valid exposure times, in nanoseconds
    int64_t minExp = 0, maxExp = 0;
    // List of valid sensor sensitivities
    Vector<int32_t> sensitivities;
    // Range of valid frame durations, in nanoseconds
    int64_t minDuration = 0, maxDuration = 0;

    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        EXPECT_EQ(2u, exposureTimeRange.count) << "Bad exposure time range tag. "
                "Using default values";
        if (exposureTimeRange.count == 2) {
            minExp = exposureTimeRange.data.i64[0];
            maxExp = exposureTimeRange.data.i64[1];
        }

        EXPECT_LT(0, minExp) << "Minimum exposure time is 0";
        EXPECT_LT(0, maxExp) << "Maximum exposure time is 0";
        EXPECT_LE(minExp, maxExp) << "Minimum exposure is greater than maximum";

        if (minExp == 0) {
            minExp = 1 * MSEC; // Fallback minimum exposure time
        }

        if (maxExp == 0) {
            maxExp = 10 * SEC; // Fallback maximum exposure time
        }
    }

    camera_metadata_ro_entry hardwareLevel =
        GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
    ASSERT_EQ(1u, hardwareLevel.count);
    uint8_t level = hardwareLevel.data.u8[0];
    ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
    ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
    if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
        const ::testing::TestInfo* const test_info =
            ::testing::UnitTest::GetInstance()->current_test_info();
        std::cerr << "Skipping test "
                  << test_info->test_case_name() << "."
                  << test_info->name()
                  << " because HAL hardware supported level is limited "
                  << std::endl;
        return;
    }

    dout << "Stream size is " << mWidth << " x " << mHeight << std::endl;
    dout << "Valid exposure range is: " <<
            minExp << " - " << maxExp << " ns " << std::endl;

    {
        camera_metadata_ro_entry sensitivityRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        EXPECT_EQ(2u, sensitivityRange.count) << "No sensitivity range listed. "
                "Falling back to default set.";
        int32_t minSensitivity = 100;
        int32_t maxSensitivity = 800;
        if (sensitivityRange.count == 2) {
            ASSERT_GT(sensitivityRange.data.i32[0], 0);
            ASSERT_GT(sensitivityRange.data.i32[1], 0);
            minSensitivity = sensitivityRange.data.i32[0];
            maxSensitivity = sensitivityRange.data.i32[1];
        }
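        // Build the candidate sensitivity list in ISO-100 steps from min to max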
        int32_t count = (maxSensitivity - minSensitivity + 99) / 100;
        sensitivities.push_back(minSensitivity);
        for (int i = 1; i < count; i++) {
            sensitivities.push_back(minSensitivity + i * 100);
        }
        sensitivities.push_back(maxSensitivity);
    }

    dout << "Available sensitivities: ";
    for (size_t i = 0; i < sensitivities.size(); i++) {
        dout << sensitivities[i] << " ";
    }
    dout << std::endl;

    {
        camera_metadata_ro_entry availableProcessedSizes =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);

        camera_metadata_ro_entry availableProcessedMinFrameDurations =
            GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);

        EXPECT_EQ(availableProcessedSizes.count,
                availableProcessedMinFrameDurations.count * 2) <<
                "The number of minimum frame durations doesn't match the number of "
                "available sizes. Using fallback values";

        if (availableProcessedSizes.count ==
                availableProcessedMinFrameDurations.count * 2) {
            bool gotSize = false;
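            // Find the min frame duration entry matching the configured stream size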
            for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
                if (availableProcessedSizes.data.i32[i] == mWidth &&
                        availableProcessedSizes.data.i32[i+1] == mHeight) {
                    gotSize = true;
                    minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
                }
            }
            EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
                    "available sizes: " << mWidth << ", " << mHeight;
        }
        if (minDuration == 0) {
            minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
        }

        ASSERT_LT(0, minDuration);

        camera_metadata_ro_entry maxFrameDuration =
            GetStaticEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION);

        EXPECT_EQ(1u, maxFrameDuration.count) << "No valid maximum frame duration";

        if (maxFrameDuration.count == 1) {
            maxDuration = maxFrameDuration.data.i64[0];
        }

        EXPECT_GT(maxDuration, 0) << "Max duration is 0 or not given, using fallback";

        if (maxDuration == 0) {
            maxDuration = 10 * SEC; // Fall back to 10 seconds as max duration
        }

    }
    dout << "Available frame duration range for configured stream size: "
         << minDuration << " - " << maxDuration << " ns" << std::endl;

    // Get environment variables if set
    const char *expVal = getenv(expEnv);
    const char *durationVal = getenv(durationEnv);
    const char *sensitivityVal = getenv(sensitivityEnv);

    bool gotExp = (expVal != NULL);
    bool gotDuration = (durationVal != NULL);
    bool gotSensitivity = (sensitivityVal != NULL);

    // All or none must be provided if using override envs
    ASSERT_TRUE( (gotDuration && gotExp && gotSensitivity) ||
            (!gotDuration && !gotExp && !gotSensitivity) ) <<
            "Incomplete set of environment variable overrides provided";

    Vector<int64_t> expList, durationList;
    Vector<int32_t> sensitivityList;
    if (gotExp) {
        ParseList(expVal, expList);
        ParseList(durationVal, durationList);
        ParseList(sensitivityVal, sensitivityList);

        ASSERT_TRUE(
            (expList.size() == durationList.size()) &&
            (durationList.size() == sensitivityList.size())) <<
            "Mismatched sizes in env lists, or parse error";

        dout << "Using burst list from environment with " << expList.size() <<
                " captures" << std::endl;
    } else {
        // Create a default set of controls based on the available ranges

        int64_t e;
        int64_t d;
        int32_t s;

        // Exposure ramp

        e = minExp;
        d = minDuration;
        s = sensitivities[0];
        while (e < maxExp) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            e = e * 2;
        }
        e = maxExp;
        expList.push_back(e);
        durationList.push_back(d);
        sensitivityList.push_back(s);

        // Duration ramp

        e = 30 * MSEC;
        d = minDuration;
        s = sensitivities[0];
        while (d < maxDuration) {
            // make sure exposure <= frame duration
            expList.push_back(e > d ? d : e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            d = d * 2;
        }

        // Sensitivity ramp

        e = 30 * MSEC;
        d = 30 * MSEC;
        d = d > minDuration ? d : minDuration;
        for (size_t i = 0; i < sensitivities.size(); i++) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(sensitivities[i]);
        }

        // Constant-EV ramp, duration == exposure

        e = 30 * MSEC; // at ISO 100
        for (size_t i = 0; i < sensitivities.size(); i++) {
            int64_t e_adj = e * 100 / sensitivities[i];
            expList.push_back(e_adj);
            durationList.push_back(e_adj > minDuration ? e_adj : minDuration);
            sensitivityList.push_back(sensitivities[i]);
        }

        dout << "Default burst sequence created with " << expList.size() <<
                " entries" << std::endl;
    }

    // Validate the list, but warn only
    for (size_t i = 0; i < expList.size(); i++) {
        EXPECT_GE(maxExp, expList[i])
            << "Capture " << i << " exposure too long: " << expList[i];
        EXPECT_LE(minExp, expList[i])
            << "Capture " << i << " exposure too short: " << expList[i];
        EXPECT_GE(maxDuration, durationList[i])
            << "Capture " << i << " duration too long: " << durationList[i];
        EXPECT_LE(minDuration, durationList[i])
            << "Capture " << i << " duration too short: " << durationList[i];
        bool validSensitivity = false;
        for (size_t j = 0; j < sensitivities.size(); j++) {
            if (sensitivityList[i] == sensitivities[j]) {
                validSensitivity = true;
                break;
            }
        }
        EXPECT_TRUE(validSensitivity)
            << "Capture " << i << " sensitivity not in list: " << sensitivityList[i];
    }

    // Check if debug yuv dumps are requested

    bool dumpFrames = false;
    {
        const char *frameDumpVal = getenv(dumpFrameEnv);
        if (frameDumpVal != NULL) {
            if (frameDumpVal[0] == '1') dumpFrames = true;
        }
    }

    dout << "Dumping YUV frames " <<
            (dumpFrames ? "enabled, not checking timing" : "disabled") << std::endl;

    // Create a base preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));
    }

    // Submit capture requests

    for (size_t i = 0; i < expList.size(); ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &expList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_FRAME_DURATION,
                                        &durationList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_SENSITIVITY,
                                        &sensitivityList[i], 1));
        ALOGV("Submitting capture %zu with exposure %" PRId64
              ", frame duration %" PRId64 ", sensitivity %d",
              i, expList[i], durationList[i], sensitivityList[i]);
        dout << "Capture request " << i <<
                ": exposure is " << (expList[i]/1e6f) << " ms" <<
                ", frame duration is " << (durationList[i]/1e6f) << " ms" <<
                ", sensitivity is " << sensitivityList[i] <<
                std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    Vector<float> brightnesses;
    Vector<nsecs_t> captureTimes;
    brightnesses.setCapacity(expList.size());
    captureTimes.setCapacity(expList.size());

    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (size_t i = 0; i < expList.size(); ++i) {

        ALOGV("Reading request %zu", i);
        dout << "Waiting for capture " << i << ": " <<
                " exposure " << (expList[i]/1e6f) << " ms," <<
                " frame duration " << (durationList[i]/1e6f) << " ms," <<
                " sensitivity " << sensitivityList[i] <<
                std::endl;

        // Set wait limit based on expected frame duration, or minimum timeout
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        if (expList[i] * 2 > waitLimit) waitLimit = expList[i] * 2;
        if (durationList[i] * 2 > waitLimit) waitLimit = durationList[i] * 2;

        ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
        ALOGV("Reading capture request-1 %zu", i);
        CaptureResult result;
        ASSERT_EQ(OK, mDevice->getNextResult(&result));
        ALOGV("Reading capture request-2 %zu", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

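        // Record the arrival time of this frame for later frame-spacing checks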
        captureTimes.push_back(systemTime());

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        float avgBrightness = 0;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        int numValidPixels = mWidth * mHeight - (underexposed + overexposed);
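        // If every pixel clipped, call the frame full-scale bright when it is
        // dominated by overexposed pixels; otherwise leave the average at 0.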
        if (numValidPixels != 0) {
            avgBrightness = brightness * 1.0f / numValidPixels;
        } else if (underexposed < overexposed) {
            avgBrightness = 255;
        }

        ALOGV("Total brightness for frame %zu was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;
        brightnesses.push_back(avgBrightness);

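        // Compare against the previous capture: equal EV (exposure time x
        // sensitivity) should yield roughly equal measured brightness.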
        if (i != 0) {
            float prevEv = static_cast<float>(expList[i - 1]) * sensitivityList[i - 1];
            float currentEv = static_cast<float>(expList[i]) * sensitivityList[i];
            float evRatio = (prevEv > currentEv) ? (currentEv / prevEv) :
                    (prevEv / currentEv);
            if ( evRatio > EV_MATCH_BOUND ) {
                WARN_LT(fabs(brightnesses[i] - brightnesses[i - 1]),
                        BRIGHTNESS_MATCH_BOUND) <<
                        "Capture brightness different from previous, even though "
                        "they have the same EV value. Ev now: " << currentEv <<
                        ", previous: " << prevEv << ". Brightness now: " <<
                        brightnesses[i] << ", previous: " << brightnesses[i-1] <<
                        std::endl;
            }
            // Only check timing if not saving to disk, since that slows things
            // down substantially
            if (!dumpFrames) {
                nsecs_t timeDelta = captureTimes[i] - captureTimes[i-1];
                nsecs_t expectedDelta = expList[i] > durationList[i] ?
                        expList[i] : durationList[i];
                WARN_LT(timeDelta, expectedDelta + DURATION_UPPER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                WARN_GT(timeDelta, expectedDelta - DURATION_LOWER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                dout << "Time delta from previous frame: " << timeDelta / 1e6 <<
                        " ms. Expected " << expectedDelta / 1e6 << " ms" << std::endl;
            }
        }

        if (dumpFrames) {
            String8 dumpName =
                String8::format("/data/local/tmp/camera2_test_variable_burst_frame_%03zu.yuv", i);
            dout << "  Writing YUV dump to " << dumpName << std::endl;
            DumpYuvToFile(dumpName, imgBuffer);
        }

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));
    }

}

}
}
}