/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "device_cb.h"

#include <aidl/android/hardware/graphics/common/PixelFormat.h>
#include <aidlcommonsupport/NativeHandle.h>
#include <grallocusage/GrallocUsageConversion.h>
#include <cinttypes>

using ::aidl::android::hardware::camera::device::BufferStatus;
using ::aidl::android::hardware::camera::device::ErrorMsg;
using ::aidl::android::hardware::camera::device::StreamBufferRequestError;
using ::aidl::android::hardware::camera::device::StreamBuffersVal;
using ::aidl::android::hardware::graphics::common::PixelFormat;

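// Maximum time to wait for outstanding HAL-managed buffers to be returned (see
// waitForBuffersReturned()).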
const int64_t kBufferReturnTimeoutSec = 1;

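// Stores the parent test fixture and the camera's static metadata for later result verification.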
DeviceCb::DeviceCb(CameraAidlTest* parent, camera_metadata_t* staticMeta) : mParent(parent) {
    mStaticMetadata = staticMeta;
}

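// ICameraDeviceCallback::notify implementation. Extracts the readout timestamp from each shutter
// message (error messages carry none) and forwards the messages to notifyHelper().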
ScopedAStatus DeviceCb::notify(const std::vector<NotifyMsg>& msgs) {
    std::vector<std::pair<bool, nsecs_t>> readoutTimestamps;

    size_t count = msgs.size();
    readoutTimestamps.resize(count);

    for (size_t i = 0; i < count; i++) {
        const NotifyMsg& msg = msgs[i];
        switch (msg.getTag()) {
            case NotifyMsg::Tag::error:
                readoutTimestamps[i] = {false, 0};
                break;
            case NotifyMsg::Tag::shutter:
                const auto& shutter = msg.get<NotifyMsg::Tag::shutter>();
                readoutTimestamps[i] = {true, shutter.readoutTimestamp};
                break;
        }
    }

    return notifyHelper(msgs, readoutTimestamps);
}

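// ICameraDeviceCallback::processCaptureResult implementation. Processes each capture result under
// the parent's lock and wakes up waiting test threads once a result warrants notification.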
ScopedAStatus DeviceCb::processCaptureResult(const std::vector<CaptureResult>& results) {
    if (nullptr == mParent) {
        return ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
    }

    bool notify = false;
    std::unique_lock<std::mutex> l(mParent->mLock);
    for (const auto& result : results) {
        notify = processCaptureResultLocked(result, result.physicalCameraMetadata);
    }

    l.unlock();
    if (notify) {
        mParent->mResultCondition.notify_one();
    }

    return ndk::ScopedAStatus::ok();
}

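// ICameraDeviceCallback::requestStreamBuffers implementation, used only when HAL buffer
// management is enabled. Validates the request against the current stream configuration,
// allocates graphic buffers up to each stream's maxBuffers limit, and tracks them in
// mOutstandingBufferIds.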
ScopedAStatus DeviceCb::requestStreamBuffers(const std::vector<BufferRequest>& bufReqs,
                                             std::vector<StreamBufferRet>* buffers,
                                             BufferRequestStatus* _aidl_return) {
    std::vector<StreamBufferRet>& bufRets = *buffers;
    std::unique_lock<std::mutex> l(mLock);

    if (!mUseHalBufManager) {
        ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__);
        ADD_FAILURE();
        *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
        return ScopedAStatus::ok();
    }

    if (bufReqs.size() > mStreams.size()) {
        ALOGE("%s: illegal buffer request: too many requests!", __FUNCTION__);
        ADD_FAILURE();
        *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
        return ndk::ScopedAStatus::ok();
    }

    std::vector<size_t> indexes(bufReqs.size());
    for (size_t i = 0; i < bufReqs.size(); i++) {
        bool found = false;
        for (size_t idx = 0; idx < mStreams.size(); idx++) {
            if (bufReqs[i].streamId == mStreams[idx].id) {
                found = true;
                indexes[i] = idx;
                break;
            }
        }
        if (!found) {
            ALOGE("%s: illegal buffer request: unknown streamId %d!", __FUNCTION__,
                  bufReqs[i].streamId);
            ADD_FAILURE();
            *_aidl_return = BufferRequestStatus::FAILED_ILLEGAL_ARGUMENTS;
            return ScopedAStatus::ok();
        }
    }

    bool allStreamOk = true;
    bool atLeastOneStreamOk = false;
    bufRets.resize(bufReqs.size());

    for (size_t i = 0; i < bufReqs.size(); i++) {
        size_t idx = indexes[i];
        const auto& stream = mStreams[idx];
        const auto& halStream = mHalStreams[idx];
        const BufferRequest& bufReq = bufReqs[i];

        if (mOutstandingBufferIds[idx].size() + bufReq.numBuffersRequested > halStream.maxBuffers) {
            bufRets[i].streamId = stream.id;
            bufRets[i].val.set<StreamBuffersVal::Tag::error>(
                    StreamBufferRequestError::MAX_BUFFER_EXCEEDED);
            allStreamOk = false;
            continue;
        }

        std::vector<StreamBuffer> tmpRetBuffers(bufReq.numBuffersRequested);
        for (size_t j = 0; j < bufReq.numBuffersRequested; j++) {
            buffer_handle_t handle;
            uint32_t w = stream.width;
            uint32_t h = stream.height;
            if (stream.format == PixelFormat::BLOB) {
                w = stream.bufferSize;
                h = 1;
            }

            CameraAidlTest::allocateGraphicBuffer(
                    w, h,
                    android_convertGralloc1To0Usage(static_cast<uint64_t>(halStream.producerUsage),
                                                    static_cast<uint64_t>(halStream.consumerUsage)),
                    halStream.overrideFormat, &handle);

            StreamBuffer& sb = tmpRetBuffers[j];
            sb = {
                    stream.id,        mNextBufferId,  ::android::dupToAidl(handle),
                    BufferStatus::OK, NativeHandle(), NativeHandle(),
            };

            mOutstandingBufferIds[idx][mNextBufferId++] = handle;
        }
        atLeastOneStreamOk = true;
        bufRets[i].streamId = stream.id;
        bufRets[i].val.set<StreamBuffersVal::Tag::buffers>(std::move(tmpRetBuffers));
    }

    if (allStreamOk) {
        *_aidl_return = BufferRequestStatus::OK;
    } else if (atLeastOneStreamOk) {
        *_aidl_return = BufferRequestStatus::FAILED_PARTIAL;
    } else {
        *_aidl_return = BufferRequestStatus::FAILED_UNKNOWN;
    }

    if (!hasOutstandingBuffersLocked()) {
        l.unlock();
        mFlushedCondition.notify_one();
    }

    return ndk::ScopedAStatus::ok();
}

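// ICameraDeviceCallback::returnStreamBuffers implementation. Removes returned buffers from the
// outstanding-buffer bookkeeping and signals waiters once no buffers remain outstanding.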
ScopedAStatus DeviceCb::returnStreamBuffers(const std::vector<StreamBuffer>& buffers) {
    if (!mUseHalBufManager) {
        ALOGE("%s: Camera does not support HAL buffer management", __FUNCTION__);
        ADD_FAILURE();
    }

    std::unique_lock<std::mutex> l(mLock);
    for (const auto& buf : buffers) {
        if (buf.bufferId == 0) {
            // Don't return buffers of bufId 0 (empty buffer)
            continue;
        }
        bool found = false;
        for (size_t idx = 0; idx < mOutstandingBufferIds.size(); idx++) {
            if (mStreams[idx].id == buf.streamId &&
                mOutstandingBufferIds[idx].count(buf.bufferId) == 1) {
                mOutstandingBufferIds[idx].erase(buf.bufferId);
                // TODO: Check whether the native handle needs to be closed/deleted here, or
                // whether it is safe to assume there is enough memory until the test finishes,
                // since the tests do not capture many requests (and most of the time one buffer
                // is sufficient).
                found = true;
                break;
            }
        }
        if (found) {
            continue;
        }
        ALOGE("%s: unknown buffer ID %" PRIu64, __FUNCTION__, buf.bufferId);
        ADD_FAILURE();
    }
    if (!hasOutstandingBuffersLocked()) {
        l.unlock();
        mFlushedCondition.notify_one();
    }

    return ndk::ScopedAStatus::ok();
}

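// Records the most recent stream configuration, enables HAL buffer management mode, and resets
// the per-stream outstanding-buffer tracking.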
void DeviceCb::setCurrentStreamConfig(const std::vector<Stream>& streams,
                                      const std::vector<HalStream>& halStreams) {
    ASSERT_EQ(streams.size(), halStreams.size());
    ASSERT_NE(streams.size(), 0);
    for (size_t i = 0; i < streams.size(); i++) {
        ASSERT_EQ(streams[i].id, halStreams[i].id);
    }
    std::lock_guard<std::mutex> l(mLock);
    mUseHalBufManager = true;
    mStreams = streams;
    mHalStreams = halStreams;
    mOutstandingBufferIds.clear();
    for (size_t i = 0; i < streams.size(); i++) {
        mOutstandingBufferIds.emplace_back();
    }
}

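// Blocks until all outstanding buffers have been returned by the HAL, or fails the test after
// kBufferReturnTimeoutSec.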
void DeviceCb::waitForBuffersReturned() {
    std::unique_lock<std::mutex> lk(mLock);
    if (hasOutstandingBuffersLocked()) {
        auto timeout = std::chrono::seconds(kBufferReturnTimeoutSec);
        auto st = mFlushedCondition.wait_for(lk, timeout);
        ASSERT_NE(std::cv_status::timeout, st);
    }
}

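// Validates a single capture result while the parent's lock is held: reads any metadata delivered
// over the result FMQ, enforces the partial-result contract, verifies monochrome/logical camera
// metadata on the final result, and accounts for the returned output/input buffers. Returns true
// when the waiting test thread should be notified.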
bool DeviceCb::processCaptureResultLocked(
        const CaptureResult& results, std::vector<PhysicalCameraMetadata> physicalCameraMetadata) {
    bool notify = false;
    uint32_t frameNumber = results.frameNumber;

    if ((results.result.metadata.empty()) && (results.outputBuffers.empty()) &&
        (results.inputBuffer.buffer.fds.empty()) && (results.fmqResultSize == 0)) {
        ALOGE("%s: No result data provided by HAL for frame %d result count: %d", __func__,
              frameNumber, (int)results.fmqResultSize);
        ADD_FAILURE();
        return notify;
    }

    auto requestEntry = mParent->mInflightMap.find(frameNumber);
    if (requestEntry == mParent->mInflightMap.end()) {
        ALOGE("%s: Unexpected frame number! received: %u", __func__, frameNumber);
        ADD_FAILURE();
        return notify;
    }

    bool isPartialResult = false;
    bool hasInputBufferInRequest = false;
    auto& request = requestEntry->second;

    CameraMetadata resultMetadata;
    size_t resultSize = 0;
    if (results.fmqResultSize > 0) {
        resultMetadata.metadata.resize(results.fmqResultSize);
        if (request->resultQueue == nullptr) {
            ADD_FAILURE();
            return notify;
        }

        if (!request->resultQueue->read(reinterpret_cast<int8_t*>(resultMetadata.metadata.data()),
                                        results.fmqResultSize)) {
            ALOGE("%s: Frame %d: Cannot read camera metadata from fmq,"
                  "size = %" PRIu64,
                  __func__, frameNumber, results.fmqResultSize);
            ADD_FAILURE();
            return notify;
        }

        // Physical device results are only expected in the last/final
        // partial result notification.
        bool expectPhysicalResults = !(request->usePartialResult &&
                                       (results.partialResult < request->numPartialResults));
        if (expectPhysicalResults &&
            (physicalCameraMetadata.size() != request->expectedPhysicalResults.size())) {
            ALOGE("%s: Frame %d: Returned physical metadata count %zu "
                  "must be equal to expected count %zu",
                  __func__, frameNumber, physicalCameraMetadata.size(),
                  request->expectedPhysicalResults.size());
            ADD_FAILURE();
            return notify;
        }
        std::vector<std::vector<uint8_t>> physResultMetadata;
        physResultMetadata.resize(physicalCameraMetadata.size());
        for (size_t i = 0; i < physicalCameraMetadata.size(); i++) {
            physResultMetadata[i].resize(physicalCameraMetadata[i].fmqMetadataSize);
            if (!request->resultQueue->read(reinterpret_cast<int8_t*>(physResultMetadata[i].data()),
                                            physicalCameraMetadata[i].fmqMetadataSize)) {
                ALOGE("%s: Frame %d: Cannot read physical camera metadata from fmq,"
                      "size = %" PRIu64,
                      __func__, frameNumber, physicalCameraMetadata[i].fmqMetadataSize);
                ADD_FAILURE();
                return notify;
            }
        }
        resultSize = resultMetadata.metadata.size();
    } else if (!results.result.metadata.empty()) {
        resultMetadata = results.result;
        resultSize = resultMetadata.metadata.size();
    }

    if (!request->usePartialResult && (resultSize > 0) && (results.partialResult != 1)) {
        ALOGE("%s: Result is malformed for frame %d: partial_result %u "
              "must be 1 if partial result is not supported",
              __func__, frameNumber, results.partialResult);
        ADD_FAILURE();
        return notify;
    }

    if (results.partialResult != 0) {
        request->partialResultCount = results.partialResult;
    }

    // Check if this result carries only partial metadata
    if (request->usePartialResult && (resultSize > 0)) {
        if ((results.partialResult > request->numPartialResults) || (results.partialResult < 1)) {
            ALOGE("%s: Result is malformed for frame %d: partial_result %u"
                  " must be in the range of [1, %d] when metadata is "
                  "included in the result",
                  __func__, frameNumber, results.partialResult, request->numPartialResults);
            ADD_FAILURE();
            return notify;
        }

        // Verify no duplicate tags between partial results
        const camera_metadata_t* partialMetadata =
                reinterpret_cast<const camera_metadata_t*>(resultMetadata.metadata.data());
        const camera_metadata_t* collectedMetadata = request->collectedResult.getAndLock();
        camera_metadata_ro_entry_t searchEntry, foundEntry;
        for (size_t i = 0; i < get_camera_metadata_entry_count(partialMetadata); i++) {
            if (0 != get_camera_metadata_ro_entry(partialMetadata, i, &searchEntry)) {
                ADD_FAILURE();
                request->collectedResult.unlock(collectedMetadata);
                return notify;
            }
            if (-ENOENT !=
                find_camera_metadata_ro_entry(collectedMetadata, searchEntry.tag, &foundEntry)) {
                ADD_FAILURE();
                request->collectedResult.unlock(collectedMetadata);
                return notify;
            }
        }
        request->collectedResult.unlock(collectedMetadata);
        request->collectedResult.append(partialMetadata);

        isPartialResult = (results.partialResult < request->numPartialResults);
    } else if (resultSize > 0) {
        request->collectedResult.append(
                reinterpret_cast<const camera_metadata_t*>(resultMetadata.metadata.data()));
        isPartialResult = false;
    }

    hasInputBufferInRequest = request->hasInputBuffer;

    // Did we get the (final) result metadata for this capture?
    if ((resultSize > 0) && !isPartialResult) {
        if (request->haveResultMetadata) {
            ALOGE("%s: Called multiple times with metadata for frame %d", __func__, frameNumber);
            ADD_FAILURE();
            return notify;
        }
        request->haveResultMetadata = true;
        request->collectedResult.sort();

        // Verify final result metadata
        camera_metadata_t* staticMetadataBuffer = mStaticMetadata;
        bool isMonochrome = Status::OK == CameraAidlTest::isMonochromeCamera(staticMetadataBuffer);
        if (isMonochrome) {
            CameraAidlTest::verifyMonochromeCameraResult(request->collectedResult);
        }

        // Verify logical camera result metadata
        bool isLogicalCamera =
                Status::OK == CameraAidlTest::isLogicalMultiCamera(staticMetadataBuffer);
        if (isLogicalCamera) {
            camera_metadata_t* collectedMetadata =
                    const_cast<camera_metadata_t*>(request->collectedResult.getAndLock());
            uint8_t* rawMetadata = reinterpret_cast<uint8_t*>(collectedMetadata);
            std::vector metadata = std::vector(
                    rawMetadata, rawMetadata + get_camera_metadata_size(collectedMetadata));
            CameraAidlTest::verifyLogicalCameraResult(staticMetadataBuffer, metadata);
            request->collectedResult.unlock(collectedMetadata);
        }
    }

    uint32_t numBuffersReturned = results.outputBuffers.size();
    auto& inputBuffer = results.inputBuffer.buffer;
    if (!inputBuffer.fds.empty() && !inputBuffer.ints.empty()) {
        if (hasInputBufferInRequest) {
            numBuffersReturned += 1;
        } else {
            ALOGW("%s: Input buffer should be NULL if there is no input"
                  " buffer sent in the request",
                  __func__);
        }
    }
    request->numBuffersLeft -= numBuffersReturned;
    if (request->numBuffersLeft < 0) {
        ALOGE("%s: Too many buffers returned for frame %d", __func__, frameNumber);
        ADD_FAILURE();
        return notify;
    }

    for (const auto& buffer : results.outputBuffers) {
        CameraAidlTest::InFlightRequest::StreamBufferAndTimestamp streamBufferAndTimestamp;
        auto outstandingBuffers = mUseHalBufManager ? mOutstandingBufferIds :
                                                      request->mOutstandingBufferIds;
        auto bufferId = mUseHalBufManager ? buffer.bufferId : results.frameNumber;
        auto outputBuffer = outstandingBuffers.empty() ? ::android::makeFromAidl(buffer.buffer) :
                                                         outstandingBuffers[buffer.streamId][bufferId];
        streamBufferAndTimestamp.buffer = {buffer.streamId,
                                           bufferId,
                                           outputBuffer,
                                           buffer.status,
                                           ::android::dupFromAidl(buffer.acquireFence),
                                           ::android::dupFromAidl(buffer.releaseFence)};
        streamBufferAndTimestamp.timeStamp = systemTime();
        request->resultOutputBuffers.push_back(streamBufferAndTimestamp);
    }
    // If the shutter event has been received, notify the pending threads.
    if (request->shutterTimestamp != 0) {
        notify = true;
    }

    if (mUseHalBufManager) {
        returnStreamBuffers(results.outputBuffers);
    }
    return notify;
}

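// Applies a batch of notify messages to the in-flight request map: device errors fail the test,
// result/buffer errors are recorded on the affected request, and shutter messages store the
// capture and readout timestamps.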
ScopedAStatus DeviceCb::notifyHelper(
        const std::vector<NotifyMsg>& msgs,
        const std::vector<std::pair<bool, nsecs_t>>& readoutTimestamps) {
    std::lock_guard<std::mutex> l(mParent->mLock);

    for (size_t i = 0; i < msgs.size(); i++) {
        const NotifyMsg& msg = msgs[i];
        NotifyMsg::Tag msgTag = msgs[i].getTag();
        switch (msgTag) {
            case NotifyMsg::Tag::error:
                if (ErrorCode::ERROR_DEVICE == msg.get<NotifyMsg::Tag::error>().errorCode) {
                    ALOGE("%s: Camera reported serious device error", __func__);
                    ADD_FAILURE();
                } else {
                    auto itr = mParent->mInflightMap.find(
                            msg.get<NotifyMsg::Tag::error>().frameNumber);
                    if (itr == mParent->mInflightMap.end()) {
                        ALOGE("%s: Unexpected error frame number! received: %u", __func__,
                              msg.get<NotifyMsg::Tag::error>().frameNumber);
                        ADD_FAILURE();
                        break;
                    }

                    auto r = itr->second;
                    if (ErrorCode::ERROR_RESULT == msg.get<NotifyMsg::Tag::error>().errorCode &&
                        msg.get<NotifyMsg::Tag::error>().errorStreamId != -1) {
                        if (r->haveResultMetadata) {
                            ALOGE("%s: Camera must report physical camera result error before "
                                  "the final capture result!",
                                  __func__);
                            ADD_FAILURE();
                        } else {
                            for (auto& mStream : mStreams) {
                                if (mStream.id == msg.get<NotifyMsg::Tag::error>().errorStreamId) {
                                    std::string physicalCameraId = mStream.physicalCameraId;
                                    bool idExpected =
                                            r->expectedPhysicalResults.find(physicalCameraId) !=
                                            r->expectedPhysicalResults.end();
                                    if (!idExpected) {
                                        ALOGE("%s: ERROR_RESULT's error stream's physicalCameraId "
                                              "%s must be expected",
                                              __func__, physicalCameraId.c_str());
                                        ADD_FAILURE();
                                    } else {
                                        r->expectedPhysicalResults.erase(physicalCameraId);
                                    }
                                    break;
                                }
                            }
                        }
                    } else {
                        r->errorCodeValid = true;
                        r->errorCode = msg.get<NotifyMsg::Tag::error>().errorCode;
                        r->errorStreamId = msg.get<NotifyMsg::Tag::error>().errorStreamId;
                    }
                }
                break;
            case NotifyMsg::Tag::shutter:
                auto itr =
                        mParent->mInflightMap.find(msg.get<NotifyMsg::Tag::shutter>().frameNumber);
                if (itr == mParent->mInflightMap.end()) {
                    ALOGE("%s: Unexpected shutter frame number! received: %u", __func__,
                          msg.get<NotifyMsg::Tag::shutter>().frameNumber);
                    ADD_FAILURE();
                    break;
                }
                auto& r = itr->second;
                r->shutterTimestamp = msg.get<NotifyMsg::Tag::shutter>().timestamp;
                r->shutterReadoutTimestampValid = readoutTimestamps[i].first;
                r->shutterReadoutTimestamp = readoutTimestamps[i].second;
                break;
        }
    }

    mParent->mResultCondition.notify_one();
    return ScopedAStatus::ok();
}

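// Returns true if any HAL-managed buffers handed out by requestStreamBuffers() have not yet been
// returned. Caller must hold mLock.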
bool DeviceCb::hasOutstandingBuffersLocked() {
    if (!mUseHalBufManager) {
        return false;
    }
    for (const auto& outstandingBuffers : mOutstandingBufferIds) {
        if (!outstandingBuffers.empty()) {
            return true;
        }
    }
    return false;
}