/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "EvsV4lCamera.h"
#include "EvsEnumerator.h"
#include "bufferCopy.h"

#include <ui/GraphicBufferAllocator.h>
#include <ui/GraphicBufferMapper.h>


namespace android {
namespace hardware {
namespace automotive {
namespace evs {
namespace V1_0 {
namespace implementation {


// Arbitrary limit on number of graphics buffers allowed to be allocated
// Safeguards against unreasonable resource consumption and provides a testable limit
static const unsigned MAX_BUFFERS_IN_FLIGHT = 100;


EvsV4lCamera::EvsV4lCamera(const char *deviceName) :
        mFramesAllowed(0),
        mFramesInUse(0) {
    ALOGD("EvsV4lCamera instantiated");

    mDescription.cameraId = deviceName;

    // Initialize the video device
    if (!mVideo.open(deviceName)) {
        ALOGE("Failed to open v4l device %s\n", deviceName);
    }
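    // Note: we don't bail out here if the open failed; the IEvsCamera entry points
    // below check mVideo.isOpen() and refuse to operate on an unopened device.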

    // NOTE: Our current spec says only support NV21 -- can we stick to that with software
    // conversion?  Will this work with the hardware texture units?
    // TODO: Settle on the one official format that works on all platforms
    // TODO: Get NV21 working?  It is scrambled somewhere along the way right now.
    // mFormat = HAL_PIXEL_FORMAT_YCRCB_420_SP;   // 420SP == NV21
    // mFormat = HAL_PIXEL_FORMAT_RGBA_8888;
    mFormat = HAL_PIXEL_FORMAT_YCBCR_422_I;

    // How we expect to use the gralloc buffers we'll exchange with our client
    mUsage = GRALLOC_USAGE_HW_TEXTURE |
             GRALLOC_USAGE_SW_READ_RARELY |
             GRALLOC_USAGE_SW_WRITE_OFTEN;
}


EvsV4lCamera::~EvsV4lCamera() {
    ALOGD("EvsV4lCamera being destroyed");
    shutdown();
}


//
// This gets called if another caller "steals" ownership of the camera
//
void EvsV4lCamera::shutdown()
{
    ALOGD("EvsV4lCamera shutdown");

    // Make sure our output stream is cleaned up
    // (It really should be already)
    stopVideoStream();

    // Note: Since stopVideoStream is blocking, no other threads can now be running

    // Close our video capture device
    mVideo.close();

    // Drop all the graphics buffers we've been using
    if (mBuffers.size() > 0) {
        GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
        for (auto&& rec : mBuffers) {
            if (rec.inUse) {
                ALOGW("Error - releasing buffer despite remote ownership");
            }
            alloc.free(rec.handle);
            rec.handle = nullptr;
        }
        mBuffers.clear();
    }
}


// Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
Return<void> EvsV4lCamera::getCameraInfo(getCameraInfo_cb _hidl_cb) {
    ALOGD("getCameraInfo");

    // Send back our self description
    _hidl_cb(mDescription);
    return Void();
}


Return<EvsResult> EvsV4lCamera::setMaxFramesInFlight(uint32_t bufferCount) {
    ALOGD("setMaxFramesInFlight");
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        ALOGW("ignoring setMaxFramesInFlight call when camera has been lost.");
        return EvsResult::OWNERSHIP_LOST;
    }

    // We cannot function without at least one video buffer to send data
    if (bufferCount < 1) {
        ALOGE("Ignoring setMaxFramesInFlight with less than one buffer requested");
        return EvsResult::INVALID_ARG;
    }

    // Update our internal state
    if (setAvailableFrames_Locked(bufferCount)) {
        return EvsResult::OK;
    } else {
        return EvsResult::BUFFER_NOT_AVAILABLE;
    }
}


Return<EvsResult> EvsV4lCamera::startVideoStream(const ::android::sp<IEvsCameraStream>& stream) {
    ALOGD("startVideoStream");
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        ALOGW("ignoring startVideoStream call when camera has been lost.");
        return EvsResult::OWNERSHIP_LOST;
    }
    if (mStream.get() != nullptr) {
        ALOGE("ignoring startVideoStream call when a stream is already running.");
        return EvsResult::STREAM_ALREADY_RUNNING;
    }

    // If the client never indicated otherwise, configure ourselves for a single streaming buffer
    if (mFramesAllowed < 1) {
        if (!setAvailableFrames_Locked(1)) {
            ALOGE("Failed to start stream because we couldn't get a graphics buffer");
            return EvsResult::BUFFER_NOT_AVAILABLE;
        }
    }

    // Choose which image transfer function we need
    // Map from V4L2 to Android graphic buffer format
    const uint32_t videoSrcFormat = mVideo.getV4LFormat();
    ALOGI("Configuring to accept %4.4s camera data and convert to %4.4s",
          (char*)&videoSrcFormat, (char*)&mFormat);

    // TODO: Simplify this by supporting only ONE fixed output format
    switch (mFormat) {
    case HAL_PIXEL_FORMAT_YCRCB_420_SP:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_NV21:     mFillBufferFromVideo = fillNV21FromNV21;    break;
        // case V4L2_PIX_FMT_YV12:  mFillBufferFromVideo = fillNV21FromYV12;    break;
        case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillNV21FromYUYV;    break;
        // case V4L2_PIX_FORMAT_NV16: mFillBufferFromVideo = fillNV21FromNV16;  break;
        default:
            // TODO: Are there other V4L2 formats we must support?
            ALOGE("Unhandled camera output format %c%c%c%c (0x%8X)\n",
                  ((char*)&videoSrcFormat)[0],
                  ((char*)&videoSrcFormat)[1],
                  ((char*)&videoSrcFormat)[2],
                  ((char*)&videoSrcFormat)[3],
                  videoSrcFormat);
        }
        break;
    case HAL_PIXEL_FORMAT_RGBA_8888:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillRGBAFromYUYV;    break;
        default:
            // TODO: Are there other V4L2 formats we must support?
            ALOGE("Unhandled camera format %4.4s", (char*)&videoSrcFormat);
        }
        break;
    case HAL_PIXEL_FORMAT_YCBCR_422_I:
        switch (videoSrcFormat) {
        case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillYUYVFromYUYV;    break;
        case V4L2_PIX_FMT_UYVY:     mFillBufferFromVideo = fillYUYVFromUYVY;    break;
        default:
            // TODO: Are there other V4L2 formats we must support?
            ALOGE("Unhandled camera format %4.4s", (char*)&videoSrcFormat);
        }
        break;
    default:
        // TODO: Why have we told ourselves to output something we don't understand!?
        ALOGE("Unhandled output format %4.4s", (char*)&mFormat);
    }
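    // Note: in the unhandled-format cases above only an error is logged;
    // mFillBufferFromVideo is left unset and the stream start below still proceeds.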


    // Record the user's callback for use when we have a frame ready
    mStream = stream;

    // Set up the video stream with a callback to our member function forwardFrame()
    if (!mVideo.startStream([this](VideoCapture*, imageBuffer* tgt, void* data) {
                                this->forwardFrame(tgt, data);
                            })
    ) {
        mStream = nullptr;  // No need to hold onto this if we failed to start
        ALOGE("underlying camera start stream failed");
        return EvsResult::UNDERLYING_SERVICE_ERROR;
    }

    return EvsResult::OK;
}


Return<void> EvsV4lCamera::doneWithFrame(const BufferDesc& buffer) {
    ALOGD("doneWithFrame");
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        ALOGW("ignoring doneWithFrame call when camera has been lost.");
    } else {
        if (buffer.memHandle == nullptr) {
            ALOGE("ignoring doneWithFrame called with null handle");
        } else if (buffer.bufferId >= mBuffers.size()) {
            ALOGE("ignoring doneWithFrame called with invalid bufferId %d (max is %zu)",
                  buffer.bufferId, mBuffers.size() - 1);
        } else if (!mBuffers[buffer.bufferId].inUse) {
            ALOGE("ignoring doneWithFrame called on frame %d which is already free",
                  buffer.bufferId);
        } else {
            // Mark the frame as available
            mBuffers[buffer.bufferId].inUse = false;
            mFramesInUse--;

            // If this frame's index is high in the array, try to move it down
            // to improve locality after mFramesAllowed has been reduced.
            if (buffer.bufferId >= mFramesAllowed) {
                // Find an empty slot lower in the array (which should always exist in this case)
                for (auto&& rec : mBuffers) {
                    if (rec.handle == nullptr) {
                        rec.handle = mBuffers[buffer.bufferId].handle;
                        mBuffers[buffer.bufferId].handle = nullptr;
                        break;
                    }
                }
            }
        }
    }

    return Void();
}


Return<void> EvsV4lCamera::stopVideoStream() {
    ALOGD("stopVideoStream");

    // Tell the capture device to stop (and block until it does)
    mVideo.stopStream();

    if (mStream != nullptr) {
        std::unique_lock<std::mutex> lock(mAccessLock);

        // Send one last NULL frame to signal the actual end of stream
        BufferDesc nullBuff = {};
        auto result = mStream->deliverFrame(nullBuff);
        if (!result.isOk()) {
            ALOGE("Error delivering end of stream marker");
        }

        // Drop our reference to the client's stream receiver
        mStream = nullptr;
    }

    return Void();
}


Return<int32_t> EvsV4lCamera::getExtendedInfo(uint32_t /*opaqueIdentifier*/) {
    ALOGD("getExtendedInfo");
    // Return zero by default as required by the spec
    return 0;
}


Return<EvsResult> EvsV4lCamera::setExtendedInfo(uint32_t /*opaqueIdentifier*/,
                                                int32_t /*opaqueValue*/) {
    ALOGD("setExtendedInfo");
    std::lock_guard<std::mutex> lock(mAccessLock);

    // If we've been displaced by another owner of the camera, then we can't do anything else
    if (!mVideo.isOpen()) {
        ALOGW("ignoring setExtendedInfo call when camera has been lost.");
        return EvsResult::OWNERSHIP_LOST;
    }

    // We don't store any device specific information in this implementation
    return EvsResult::INVALID_ARG;
}


bool EvsV4lCamera::setAvailableFrames_Locked(unsigned bufferCount) {
    if (bufferCount < 1) {
        ALOGE("Ignoring request to set buffer count to zero");
        return false;
    }
    if (bufferCount > MAX_BUFFERS_IN_FLIGHT) {
        ALOGE("Rejecting buffer request in excess of internal limit");
        return false;
    }

    // Is an increase required?
    if (mFramesAllowed < bufferCount) {
        // An increase is required
        unsigned needed = bufferCount - mFramesAllowed;
        ALOGI("Allocating %d buffers for camera frames", needed);

        unsigned added = increaseAvailableFrames_Locked(needed);
        if (added != needed) {
            // If we didn't add all the frames we needed, then roll back to the previous state
            ALOGE("Rolling back to previous frame queue size");
            decreaseAvailableFrames_Locked(added);
            return false;
        }
    } else if (mFramesAllowed > bufferCount) {
        // A decrease is required
        unsigned framesToRelease = mFramesAllowed - bufferCount;
        ALOGI("Returning %d camera frame buffers", framesToRelease);

        unsigned released = decreaseAvailableFrames_Locked(framesToRelease);
        if (released != framesToRelease) {
            // This shouldn't happen with a properly behaving client because the client
            // should only make this call after returning sufficient outstanding buffers
            // to allow a clean resize.
            ALOGE("Buffer queue shrink failed -- too many buffers currently in use?");
        }
    }

    return true;
}


unsigned EvsV4lCamera::increaseAvailableFrames_Locked(unsigned numToAdd) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());

    unsigned added = 0;


    while (added < numToAdd) {
        unsigned pixelsPerLine;
        buffer_handle_t memHandle = nullptr;
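        // Request a gralloc buffer matching the video frame dimensions and our chosen
        // format/usage (single layer); the allocator reports the actual stride in pixels.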
        status_t result = alloc.allocate(mVideo.getWidth(), mVideo.getHeight(),
                                         mFormat, 1,
                                         mUsage,
                                         &memHandle, &pixelsPerLine, 0, "EvsV4lCamera");
        if (result != NO_ERROR) {
            ALOGE("Error %d allocating %d x %d graphics buffer",
                  result,
                  mVideo.getWidth(),
                  mVideo.getHeight());
            break;
        }
        if (!memHandle) {
            ALOGE("We didn't get a buffer handle back from the allocator");
            break;
        }
        if (mStride) {
            if (mStride != pixelsPerLine) {
                ALOGE("We did not expect to get buffers with different strides!");
            }
        } else {
            // Gralloc defines stride in terms of pixels per line
            mStride = pixelsPerLine;
        }

        // Find a place to store the new buffer
        bool stored = false;
        for (auto&& rec : mBuffers) {
            if (rec.handle == nullptr) {
                // Use this existing entry
                rec.handle = memHandle;
                rec.inUse = false;
                stored = true;
                break;
            }
        }
        if (!stored) {
            // Add a BufferRecord wrapping this handle to our set of available buffers
            mBuffers.emplace_back(memHandle);
        }

        mFramesAllowed++;
        added++;
    }

    return added;
}


unsigned EvsV4lCamera::decreaseAvailableFrames_Locked(unsigned numToRemove) {
    // Acquire the graphics buffer allocator
    GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());

    unsigned removed = 0;

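    // Note: freed records are left in place with a null handle rather than erased, so
    // the vector indices (used as bufferId) of buffers still held by the client stay valid.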
    for (auto&& rec : mBuffers) {
        // Is this record not in use, but holding a buffer that we can free?
        if ((rec.inUse == false) && (rec.handle != nullptr)) {
            // Release buffer and update the record so we can recognize it as "empty"
            alloc.free(rec.handle);
            rec.handle = nullptr;

            mFramesAllowed--;
            removed++;

            if (removed == numToRemove) {
                break;
            }
        }
    }

    return removed;
}


// This is the async callback from the video camera that tells us a frame is ready
void EvsV4lCamera::forwardFrame(imageBuffer* /*pV4lBuff*/, void* pData) {
    bool readyForFrame = false;
    size_t idx = 0;
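    // Claim a free buffer slot while holding the lock; the pixel copy and the client
    // callback further below happen outside the lock.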

    // Lock scope for updating shared state
    {
        std::lock_guard<std::mutex> lock(mAccessLock);

        // Are we allowed to issue another buffer?
        if (mFramesInUse >= mFramesAllowed) {
            // Can't do anything right now -- skip this frame
            ALOGW("Skipped a frame because too many are in flight\n");
        } else {
            // Identify an available buffer to fill
            for (idx = 0; idx < mBuffers.size(); idx++) {
                if (!mBuffers[idx].inUse) {
                    if (mBuffers[idx].handle != nullptr) {
                        // Found an available record, so stop looking
                        break;
                    }
                }
            }
            if (idx >= mBuffers.size()) {
                // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
                ALOGE("Failed to find an available buffer slot\n");
            } else {
                // We're going to make the frame busy
                mBuffers[idx].inUse = true;
                mFramesInUse++;
                readyForFrame = true;
            }
        }
    }

    if (!readyForFrame) {
        // We need to return the video buffer so it can capture a new frame
        mVideo.markFrameConsumed();
    } else {
        // Assemble the buffer description we'll transmit below
        BufferDesc buff = {};
        buff.width     = mVideo.getWidth();
        buff.height    = mVideo.getHeight();
        buff.stride    = mStride;
        buff.format    = mFormat;
        buff.usage     = mUsage;
        buff.bufferId  = idx;
        buff.memHandle = mBuffers[idx].handle;

        // Lock our output buffer for writing
        void* targetPixels = nullptr;
        GraphicBufferMapper& mapper = GraphicBufferMapper::get();
        mapper.lock(buff.memHandle,
                    GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_SW_READ_NEVER,
                    android::Rect(buff.width, buff.height),
                    (void**)&targetPixels);

        // If we failed to lock the pixel buffer, we're about to crash, but log it first
        if (!targetPixels) {
            ALOGE("Camera failed to gain access to image buffer for writing");
        }

        // Transfer the video image into the output buffer, making any needed
        // format conversion along the way
        mFillBufferFromVideo(buff, (uint8_t*)targetPixels, pData, mVideo.getStride());

        // Unlock the output buffer
        mapper.unlock(buff.memHandle);


        // Give the video frame back to the underlying device for reuse
        // Note that we do this before making the client callback to give the underlying
        // camera more time to capture the next frame.
        mVideo.markFrameConsumed();

        // Issue the (asynchronous) callback to the client -- can't be holding the lock
        auto result = mStream->deliverFrame(buff);
        if (result.isOk()) {
            ALOGD("Delivered %p as id %d", buff.memHandle.getNativeHandle(), buff.bufferId);
        } else {
            // This can happen if the client dies and is likely unrecoverable.
            // To avoid consuming resources generating failing calls, we stop sending
            // frames.  Note, however, that the stream remains in the "STREAMING" state
            // until cleaned up on the main thread.
            ALOGE("Frame delivery call failed in the transport layer.");

            // Since we didn't actually deliver it, mark the frame as available
            std::lock_guard<std::mutex> lock(mAccessLock);
            mBuffers[idx].inUse = false;
            mFramesInUse--;
        }
    }
}

} // namespace implementation
} // namespace V1_0
} // namespace evs
} // namespace automotive
} // namespace hardware
} // namespace android