1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #define LOG_TAG "Camera2-Device"
18 #define ATRACE_TAG ATRACE_TAG_CAMERA
19 //#define LOG_NDEBUG 0
20 //#define LOG_NNDEBUG 0  // Per-frame verbose logging
21 
22 #ifdef LOG_NNDEBUG
23 #define ALOGVV(...) ALOGV(__VA_ARGS__)
24 #else
25 #define ALOGVV(...) ((void)0)
26 #endif
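// Note on the two verbosity tiers used in this file (a descriptive aside, not
// upstream text): ALOGV (enabled by uncommenting LOG_NDEBUG above) traces
// per-call entry points, while ALOGVV (enabled by also uncommenting
// LOG_NNDEBUG) is reserved for per-frame/per-buffer paths such as
// MetadataQueue::dequeue() and StreamAdapter::dequeue_buffer(), which would
// otherwise flood the log. With LOG_NNDEBUG undefined, ALOGVV compiles to a
// no-op as defined above.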
27 
28 #include <inttypes.h>
29 #include <utils/Log.h>
30 #include <utils/Trace.h>
31 #include <utils/Timers.h>
32 #include "Camera2Device.h"
33 #include "CameraService.h"
34 
35 namespace android {
36 
37 Camera2Device::Camera2Device(int id):
38         mId(id),
39         mHal2Device(NULL)
40 {
41     ATRACE_CALL();
42     ALOGV("%s: Created device for camera %d", __FUNCTION__, id);
43 }
44 
45 Camera2Device::~Camera2Device()
46 {
47     ATRACE_CALL();
48     ALOGV("%s: Tearing down for camera id %d", __FUNCTION__, mId);
49     disconnect();
50 }
51 
52 int Camera2Device::getId() const {
53     return mId;
54 }
55 
56 status_t Camera2Device::initialize(CameraModule *module)
57 {
58     ATRACE_CALL();
59     ALOGV("%s: Initializing device for camera %d", __FUNCTION__, mId);
60     if (mHal2Device != NULL) {
61         ALOGE("%s: Already initialized!", __FUNCTION__);
62         return INVALID_OPERATION;
63     }
64 
65     status_t res;
66     char name[10];
67     snprintf(name, sizeof(name), "%d", mId);
68 
69     camera2_device_t *device;
70 
71     res = module->open(name, reinterpret_cast<hw_device_t**>(&device));
72 
73     if (res != OK) {
74         ALOGE("%s: Could not open camera %d: %s (%d)", __FUNCTION__,
75                 mId, strerror(-res), res);
76         return res;
77     }
78 
79     if (device->common.version != CAMERA_DEVICE_API_VERSION_2_0) {
80         ALOGE("%s: Could not open camera %d: "
81                 "Camera device is not version %x, reports %x instead",
82                 __FUNCTION__, mId, CAMERA_DEVICE_API_VERSION_2_0,
83                 device->common.version);
84         device->common.close(&device->common);
85         return BAD_VALUE;
86     }
87 
88     camera_info info;
89     res = module->getCameraInfo(mId, &info);
90     if (res != OK ) return res;
91 
92     if (info.device_version != device->common.version) {
93         ALOGE("%s: HAL reporting mismatched camera_info version (%x)"
94                 " and device version (%x).", __FUNCTION__,
95                 device->common.version, info.device_version);
96         device->common.close(&device->common);
97         return BAD_VALUE;
98     }
99 
100     res = mRequestQueue.setConsumerDevice(device);
101     if (res != OK) {
102         ALOGE("%s: Camera %d: Unable to connect request queue to device: %s (%d)",
103                 __FUNCTION__, mId, strerror(-res), res);
104         device->common.close(&device->common);
105         return res;
106     }
107     res = mFrameQueue.setProducerDevice(device);
108     if (res != OK) {
109         ALOGE("%s: Camera %d: Unable to connect frame queue to device: %s (%d)",
110                 __FUNCTION__, mId, strerror(-res), res);
111         device->common.close(&device->common);
112         return res;
113     }
114 
115     res = device->ops->set_notify_callback(device, notificationCallback,
116             NULL);
117     if (res != OK) {
118         ALOGE("%s: Camera %d: Unable to initialize notification callback!",
119                 __FUNCTION__, mId);
120         device->common.close(&device->common);
121         return res;
122     }
123 
124     mDeviceInfo = info.static_camera_characteristics;
125     mHal2Device = device;
126     mDeviceVersion = device->common.version;
127 
128     return OK;
129 }
130 
131 status_t Camera2Device::disconnect() {
132     ATRACE_CALL();
133     status_t res = OK;
134     if (mHal2Device) {
135         ALOGV("%s: Closing device for camera %d", __FUNCTION__, mId);
136 
137         int inProgressCount = mHal2Device->ops->get_in_progress_count(mHal2Device);
138         if (inProgressCount > 0) {
139             ALOGW("%s: Closing camera device %d with %d requests in flight!",
140                     __FUNCTION__, mId, inProgressCount);
141         }
142         mReprocessStreams.clear();
143         mStreams.clear();
144         res = mHal2Device->common.close(&mHal2Device->common);
145         if (res != OK) {
146             ALOGE("%s: Could not close camera %d: %s (%d)",
147                     __FUNCTION__,
148                     mId, strerror(-res), res);
149         }
150         mHal2Device = NULL;
151         ALOGV("%s: Shutdown complete", __FUNCTION__);
152     }
153     return res;
154 }
155 
156 status_t Camera2Device::dump(int fd, const Vector<String16>& args) {
157     ATRACE_CALL();
158     String8 result;
159     int detailLevel = 0;
160     int n = args.size();
161     String16 detailOption("-d");
162     for (int i = 0; i + 1 < n; i++) {
163         if (args[i] == detailOption) {
164             String8 levelStr(args[i+1]);
165             detailLevel = atoi(levelStr.string());
166         }
167     }
168 
169     result.appendFormat("  Camera2Device[%d] dump (detail level %d):\n",
170             mId, detailLevel);
171 
172     if (detailLevel > 0) {
173         result = "    Request queue contents:\n";
174         write(fd, result.string(), result.size());
175         mRequestQueue.dump(fd, args);
176 
177         result = "    Frame queue contents:\n";
178         write(fd, result.string(), result.size());
179         mFrameQueue.dump(fd, args);
180     }
181 
182     result = "    Active streams:\n";
183     write(fd, result.string(), result.size());
184     for (StreamList::iterator s = mStreams.begin(); s != mStreams.end(); s++) {
185         (*s)->dump(fd, args);
186     }
187 
188     result = "    HAL device dump:\n";
189     write(fd, result.string(), result.size());
190 
191     status_t res;
192     res = mHal2Device->ops->dump(mHal2Device, fd);
193 
194     return res;
195 }
196 
197 const CameraMetadata& Camera2Device::info() const {
198     ALOGVV("%s: E", __FUNCTION__);
199 
200     return mDeviceInfo;
201 }
202 
203 status_t Camera2Device::capture(CameraMetadata &request, int64_t* /*lastFrameNumber*/) {
204     ATRACE_CALL();
205     ALOGV("%s: E", __FUNCTION__);
206 
207     mRequestQueue.enqueue(request.release());
208     return OK;
209 }
210 
211 status_t Camera2Device::captureList(const List<const CameraMetadata> &requests,
212                                     int64_t* /*lastFrameNumber*/) {
213     ATRACE_CALL();
214     ALOGE("%s: Camera2Device burst capture not implemented", __FUNCTION__);
215     return INVALID_OPERATION;
216 }
217 
218 status_t Camera2Device::setStreamingRequest(const CameraMetadata &request,
219                                             int64_t* /*lastFrameNumber*/) {
220     ATRACE_CALL();
221     ALOGV("%s: E", __FUNCTION__);
222     CameraMetadata streamRequest(request);
223     return mRequestQueue.setStreamSlot(streamRequest.release());
224 }
225 
226 status_t Camera2Device::setStreamingRequestList(const List<const CameraMetadata> &requests,
227                                                 int64_t* /*lastFrameNumber*/) {
228     ATRACE_CALL();
229     ALOGE("%s: Camera2Device streaming burst not implemented", __FUNCTION__);
230     return INVALID_OPERATION;
231 }
232 
233 status_t Camera2Device::clearStreamingRequest(int64_t* /*lastFrameNumber*/) {
234     ATRACE_CALL();
235     return mRequestQueue.setStreamSlot(NULL);
236 }
237 
238 status_t Camera2Device::waitUntilRequestReceived(int32_t requestId, nsecs_t timeout) {
239     ATRACE_CALL();
240     return mRequestQueue.waitForDequeue(requestId, timeout);
241 }
242 
243 status_t Camera2Device::createStream(sp<Surface> consumer,
244         uint32_t width, uint32_t height, int format,
245         android_dataspace /*dataSpace*/, camera3_stream_rotation_t rotation, int *id) {
246     ATRACE_CALL();
247     status_t res;
248     ALOGV("%s: E", __FUNCTION__);
249 
250     sp<StreamAdapter> stream = new StreamAdapter(mHal2Device);
251     size_t size = 0;
252     if (format == HAL_PIXEL_FORMAT_BLOB) {
253         size = getJpegBufferSize(width, height);
254     }
255     res = stream->connectToDevice(consumer, width, height, format, size);
256     if (res != OK) {
257         ALOGE("%s: Camera %d: Unable to create stream (%d x %d, format %x):"
258                 "%s (%d)",
259                 __FUNCTION__, mId, width, height, format, strerror(-res), res);
260         return res;
261     }
262 
263     *id = stream->getId();
264 
265     mStreams.push_back(stream);
266     return OK;
267 }
268 
269 ssize_t Camera2Device::getJpegBufferSize(uint32_t width, uint32_t height) const {
270     // Always give the max jpeg buffer size regardless of the actual jpeg resolution.
271     camera_metadata_ro_entry jpegBufMaxSize = mDeviceInfo.find(ANDROID_JPEG_MAX_SIZE);
272     if (jpegBufMaxSize.count == 0) {
273         ALOGE("%s: Camera %d: Can't find maximum JPEG size in static metadata!", __FUNCTION__, mId);
274         return BAD_VALUE;
275     }
276 
277     return jpegBufMaxSize.data.i32[0];
278 }
279 
280 status_t Camera2Device::createReprocessStreamFromStream(int outputId, int *id) {
281     ATRACE_CALL();
282     status_t res;
283     ALOGV("%s: E", __FUNCTION__);
284 
285     bool found = false;
286     StreamList::iterator streamI;
287     for (streamI = mStreams.begin();
288          streamI != mStreams.end(); streamI++) {
289         if ((*streamI)->getId() == outputId) {
290             found = true;
291             break;
292         }
293     }
294     if (!found) {
295         ALOGE("%s: Camera %d: Output stream %d doesn't exist; can't create "
296                 "reprocess stream from it!", __FUNCTION__, mId, outputId);
297         return BAD_VALUE;
298     }
299 
300     sp<ReprocessStreamAdapter> stream = new ReprocessStreamAdapter(mHal2Device);
301 
302     res = stream->connectToDevice((*streamI));
303     if (res != OK) {
304         ALOGE("%s: Camera %d: Unable to create reprocessing stream from "\
305                 "stream %d: %s (%d)", __FUNCTION__, mId, outputId,
306                 strerror(-res), res);
307         return res;
308     }
309 
310     *id = stream->getId();
311 
312     mReprocessStreams.push_back(stream);
313     return OK;
314 }
315 
316 
317 status_t Camera2Device::getStreamInfo(int id,
318         uint32_t *width, uint32_t *height,
319         uint32_t *format, android_dataspace *dataSpace) {
320     ATRACE_CALL();
321     ALOGV("%s: E", __FUNCTION__);
322     bool found = false;
323     StreamList::iterator streamI;
324     for (streamI = mStreams.begin();
325          streamI != mStreams.end(); streamI++) {
326         if ((*streamI)->getId() == id) {
327             found = true;
328             break;
329         }
330     }
331     if (!found) {
332         ALOGE("%s: Camera %d: Stream %d does not exist",
333                 __FUNCTION__, mId, id);
334         return BAD_VALUE;
335     }
336 
337     if (width) *width = (*streamI)->getWidth();
338     if (height) *height = (*streamI)->getHeight();
339     if (format) *format = (*streamI)->getFormat();
340     if (dataSpace) *dataSpace = HAL_DATASPACE_UNKNOWN;
341 
342     return OK;
343 }
344 
345 status_t Camera2Device::setStreamTransform(int id,
346         int transform) {
347     ATRACE_CALL();
348     ALOGV("%s: E", __FUNCTION__);
349     bool found = false;
350     StreamList::iterator streamI;
351     for (streamI = mStreams.begin();
352          streamI != mStreams.end(); streamI++) {
353         if ((*streamI)->getId() == id) {
354             found = true;
355             break;
356         }
357     }
358     if (!found) {
359         ALOGE("%s: Camera %d: Stream %d does not exist",
360                 __FUNCTION__, mId, id);
361         return BAD_VALUE;
362     }
363 
364     return (*streamI)->setTransform(transform);
365 }
366 
367 status_t Camera2Device::deleteStream(int id) {
368     ATRACE_CALL();
369     ALOGV("%s: E", __FUNCTION__);
370     bool found = false;
371     for (StreamList::iterator streamI = mStreams.begin();
372          streamI != mStreams.end(); streamI++) {
373         if ((*streamI)->getId() == id) {
374             status_t res = (*streamI)->release();
375             if (res != OK) {
376                 ALOGE("%s: Unable to release stream %d from HAL device: "
377                         "%s (%d)", __FUNCTION__, id, strerror(-res), res);
378                 return res;
379             }
380             mStreams.erase(streamI);
381             found = true;
382             break;
383         }
384     }
385     if (!found) {
386         ALOGE("%s: Camera %d: Unable to find stream %d to delete",
387                 __FUNCTION__, mId, id);
388         return BAD_VALUE;
389     }
390     return OK;
391 }
392 
393 status_t Camera2Device::deleteReprocessStream(int id) {
394     ATRACE_CALL();
395     ALOGV("%s: E", __FUNCTION__);
396     bool found = false;
397     for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
398          streamI != mReprocessStreams.end(); streamI++) {
399         if ((*streamI)->getId() == id) {
400             status_t res = (*streamI)->release();
401             if (res != OK) {
402                 ALOGE("%s: Unable to release reprocess stream %d from "
403                         "HAL device: %s (%d)", __FUNCTION__, id,
404                         strerror(-res), res);
405                 return res;
406             }
407             mReprocessStreams.erase(streamI);
408             found = true;
409             break;
410         }
411     }
412     if (!found) {
413         ALOGE("%s: Camera %d: Unable to find stream %d to delete",
414                 __FUNCTION__, mId, id);
415         return BAD_VALUE;
416     }
417     return OK;
418 }
419 
420 status_t Camera2Device::configureStreams(bool isConstrainedHighSpeed) {
421     ATRACE_CALL();
422     ALOGV("%s: E", __FUNCTION__);
423 
424     /**
425      * HAL2 devices do not need to configure streams;
426      * streams are created on the fly.
427      */
428     ALOGW("%s: No-op for HAL2 devices", __FUNCTION__);
429 
430     return OK;
431 }
432 
433 
434 status_t Camera2Device::createDefaultRequest(int templateId,
435         CameraMetadata *request) {
436     ATRACE_CALL();
437     status_t err;
438     ALOGV("%s: E", __FUNCTION__);
439     camera_metadata_t *rawRequest;
440     err = mHal2Device->ops->construct_default_request(
441         mHal2Device, templateId, &rawRequest);
442     request->acquire(rawRequest);
443     return err;
444 }
445 
446 status_t Camera2Device::waitUntilDrained() {
447     ATRACE_CALL();
448     static const uint32_t kSleepTime = 50000; // 50 ms
449     static const uint32_t kMaxSleepTime = 10000000; // 10 s
450     ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);
451     if (mRequestQueue.getBufferCount() ==
452             CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS) return INVALID_OPERATION;
453 
454     // TODO: Set up notifications from HAL, instead of sleeping here
455     uint32_t totalTime = 0;
456     while (mHal2Device->ops->get_in_progress_count(mHal2Device) > 0) {
457         usleep(kSleepTime);
458         totalTime += kSleepTime;
459         if (totalTime > kMaxSleepTime) {
460             ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__,
461                     totalTime, mHal2Device->ops->get_in_progress_count(mHal2Device));
462             return TIMED_OUT;
463         }
464     }
465     ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
466     return OK;
467 }
468 
469 status_t Camera2Device::setNotifyCallback(NotificationListener *listener) {
470     ATRACE_CALL();
471     status_t res;
472     res = mHal2Device->ops->set_notify_callback(mHal2Device, notificationCallback,
473             reinterpret_cast<void*>(listener) );
474     if (res != OK) {
475         ALOGE("%s: Unable to set notification callback!", __FUNCTION__);
476     }
477     return res;
478 }
479 
480 bool Camera2Device::willNotify3A() {
481     return true;
482 }
483 
484 void Camera2Device::notificationCallback(int32_t msg_type,
485         int32_t ext1,
486         int32_t ext2,
487         int32_t ext3,
488         void *user) {
489     ATRACE_CALL();
490     NotificationListener *listener = reinterpret_cast<NotificationListener*>(user);
491     ALOGV("%s: Notification %d, arguments %d, %d, %d", __FUNCTION__, msg_type,
492             ext1, ext2, ext3);
493     if (listener != NULL) {
494         switch (msg_type) {
495             case CAMERA2_MSG_ERROR:
496                 // TODO: This needs to be fixed. ext2 and ext3 need to be considered.
497                 listener->notifyError(
498                         ((ext1 == CAMERA2_MSG_ERROR_DEVICE)
499                         || (ext1 == CAMERA2_MSG_ERROR_HARDWARE)) ?
500                                 ICameraDeviceCallbacks::ERROR_CAMERA_DEVICE :
501                                 ICameraDeviceCallbacks::ERROR_CAMERA_SERVICE,
502                         CaptureResultExtras());
503                 break;
504             case CAMERA2_MSG_SHUTTER: {
505                 // TODO: Only needed for camera2 API, which is unsupported
506                 // by HAL2 directly.
507                 // nsecs_t timestamp = (nsecs_t)ext2 | ((nsecs_t)(ext3) << 32 );
508                 // listener->notifyShutter(requestId, timestamp);
509                 break;
510             }
511             case CAMERA2_MSG_AUTOFOCUS:
512                 listener->notifyAutoFocus(ext1, ext2);
513                 break;
514             case CAMERA2_MSG_AUTOEXPOSURE:
515                 listener->notifyAutoExposure(ext1, ext2);
516                 break;
517             case CAMERA2_MSG_AUTOWB:
518                 listener->notifyAutoWhitebalance(ext1, ext2);
519                 break;
520             default:
521                 ALOGE("%s: Unknown notification %d (arguments %d, %d, %d)!",
522                         __FUNCTION__, msg_type, ext1, ext2, ext3);
523         }
524     }
525 }
526 
527 status_t Camera2Device::waitForNextFrame(nsecs_t timeout) {
528     return mFrameQueue.waitForBuffer(timeout);
529 }
530 
531 status_t Camera2Device::getNextResult(CaptureResult *result) {
532     ATRACE_CALL();
533     ALOGV("%s: get CaptureResult", __FUNCTION__);
534     if (result == NULL) {
535         ALOGE("%s: result pointer is NULL", __FUNCTION__);
536         return BAD_VALUE;
537     }
538     status_t res;
539     camera_metadata_t *rawFrame;
540     res = mFrameQueue.dequeue(&rawFrame);
541     if (rawFrame == NULL) {
542         return NOT_ENOUGH_DATA;
543     } else if (res == OK) {
544         result->mMetadata.acquire(rawFrame);
545     }
546 
547     return res;
548 }
549 
550 status_t Camera2Device::triggerAutofocus(uint32_t id) {
551     ATRACE_CALL();
552     status_t res;
553     ALOGV("%s: Triggering autofocus, id %d", __FUNCTION__, id);
554     res = mHal2Device->ops->trigger_action(mHal2Device,
555             CAMERA2_TRIGGER_AUTOFOCUS, id, 0);
556     if (res != OK) {
557         ALOGE("%s: Error triggering autofocus (id %d)",
558                 __FUNCTION__, id);
559     }
560     return res;
561 }
562 
563 status_t Camera2Device::triggerCancelAutofocus(uint32_t id) {
564     ATRACE_CALL();
565     status_t res;
566     ALOGV("%s: Canceling autofocus, id %d", __FUNCTION__, id);
567     res = mHal2Device->ops->trigger_action(mHal2Device,
568             CAMERA2_TRIGGER_CANCEL_AUTOFOCUS, id, 0);
569     if (res != OK) {
570         ALOGE("%s: Error canceling autofocus (id %d)",
571                 __FUNCTION__, id);
572     }
573     return res;
574 }
575 
576 status_t Camera2Device::triggerPrecaptureMetering(uint32_t id) {
577     ATRACE_CALL();
578     status_t res;
579     ALOGV("%s: Triggering precapture metering, id %d", __FUNCTION__, id);
580     res = mHal2Device->ops->trigger_action(mHal2Device,
581             CAMERA2_TRIGGER_PRECAPTURE_METERING, id, 0);
582     if (res != OK) {
583         ALOGE("%s: Error triggering precapture metering (id %d)",
584                 __FUNCTION__, id);
585     }
586     return res;
587 }
588 
589 status_t Camera2Device::pushReprocessBuffer(int reprocessStreamId,
590         buffer_handle_t *buffer, wp<BufferReleasedListener> listener) {
591     ATRACE_CALL();
592     ALOGV("%s: E", __FUNCTION__);
593     bool found = false;
594     status_t res = OK;
595     for (ReprocessStreamList::iterator streamI = mReprocessStreams.begin();
596          streamI != mReprocessStreams.end(); streamI++) {
597         if ((*streamI)->getId() == reprocessStreamId) {
598             res = (*streamI)->pushIntoStream(buffer, listener);
599             if (res != OK) {
600                 ALOGE("%s: Unable to push buffer to reprocess stream %d: %s (%d)",
601                         __FUNCTION__, reprocessStreamId, strerror(-res), res);
602                 return res;
603             }
604             found = true;
605             break;
606         }
607     }
608     if (!found) {
609         ALOGE("%s: Camera %d: Unable to find reprocess stream %d",
610                 __FUNCTION__, mId, reprocessStreamId);
611         res = BAD_VALUE;
612     }
613     return res;
614 }
615 
616 status_t Camera2Device::flush(int64_t* /*lastFrameNumber*/) {
617     ATRACE_CALL();
618 
619     mRequestQueue.clear();
620     return waitUntilDrained();
621 }
622 
623 status_t Camera2Device::prepare(int streamId) {
624     ATRACE_CALL();
625     ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
626     return NO_INIT;
627 }
628 
629 status_t Camera2Device::tearDown(int streamId) {
630     ATRACE_CALL();
631     ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
632     return NO_INIT;
633 }
634 
635 status_t Camera2Device::prepare(int maxCount, int streamId) {
636     ATRACE_CALL();
637     ALOGE("%s: Camera %d: unimplemented", __FUNCTION__, mId);
638     return NO_INIT;
639 }
640 
641 uint32_t Camera2Device::getDeviceVersion() {
642     ATRACE_CALL();
643     return mDeviceVersion;
644 }
645 
646 /**
647  * Camera2Device::MetadataQueue
648  */
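// MetadataQueue fills both HAL-facing roles with one object: it derives from
// the HAL's C callback structs (camera2_request_queue_src_ops and
// camera2_frame_queue_dst_ops) and, in the constructor below, points their
// function-pointer members at the static trampolines defined later in this
// section. Each trampoline recovers the owning queue via getInstance() and
// forwards to the matching member function. A rough sketch of the request
// path, with the HAL-side call abbreviated:
//
//   // HAL side (C): ops->dequeue_request(ops, &buf)
//   // lands in:     MetadataQueue::consumer_dequeue(q, &buf)
//   //               -> getInstance(q)->dequeue(&buf, /*incrementCount*/ true)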
649 
650 Camera2Device::MetadataQueue::MetadataQueue():
651             mHal2Device(NULL),
652             mFrameCount(0),
653             mLatestRequestId(0),
654             mCount(0),
655             mStreamSlotCount(0),
656             mSignalConsumer(true)
657 {
658     ATRACE_CALL();
659     camera2_request_queue_src_ops::dequeue_request = consumer_dequeue;
660     camera2_request_queue_src_ops::request_count = consumer_buffer_count;
661     camera2_request_queue_src_ops::free_request = consumer_free;
662 
663     camera2_frame_queue_dst_ops::dequeue_frame = producer_dequeue;
664     camera2_frame_queue_dst_ops::cancel_frame = producer_cancel;
665     camera2_frame_queue_dst_ops::enqueue_frame = producer_enqueue;
666 }
667 
668 Camera2Device::MetadataQueue::~MetadataQueue() {
669     ATRACE_CALL();
670     clear();
671 }
672 
673 // Connect to camera2 HAL as consumer (input requests/reprocessing)
674 status_t Camera2Device::MetadataQueue::setConsumerDevice(camera2_device_t *d) {
675     ATRACE_CALL();
676     status_t res;
677     res = d->ops->set_request_queue_src_ops(d,
678             this);
679     if (res != OK) return res;
680     mHal2Device = d;
681     return OK;
682 }
683 
684 status_t Camera2Device::MetadataQueue::setProducerDevice(camera2_device_t *d) {
685     ATRACE_CALL();
686     status_t res;
687     res = d->ops->set_frame_queue_dst_ops(d,
688             this);
689     return res;
690 }
691 
692 // Real interfaces
693 status_t Camera2Device::MetadataQueue::enqueue(camera_metadata_t *buf) {
694     ATRACE_CALL();
695     ALOGVV("%s: E", __FUNCTION__);
696     Mutex::Autolock l(mMutex);
697 
698     mCount++;
699     mEntries.push_back(buf);
700 
701     return signalConsumerLocked();
702 }
703 
704 int Camera2Device::MetadataQueue::getBufferCount() {
705     ATRACE_CALL();
706     Mutex::Autolock l(mMutex);
707     if (mStreamSlotCount > 0) {
708         return CAMERA2_REQUEST_QUEUE_IS_BOTTOMLESS;
709     }
710     return mCount;
711 }
712 
713 status_t Camera2Device::MetadataQueue::dequeue(camera_metadata_t **buf,
714         bool incrementCount)
715 {
716     ATRACE_CALL();
717     ALOGVV("%s: E", __FUNCTION__);
718     status_t res;
719     Mutex::Autolock l(mMutex);
720 
721     if (mCount == 0) {
722         if (mStreamSlotCount == 0) {
723             ALOGVV("%s: Empty", __FUNCTION__);
724             *buf = NULL;
725             mSignalConsumer = true;
726             return OK;
727         }
728         ALOGVV("%s: Streaming %d frames to queue", __FUNCTION__,
729               mStreamSlotCount);
730 
731         for (List<camera_metadata_t*>::iterator slotEntry = mStreamSlot.begin();
732                 slotEntry != mStreamSlot.end();
733                 slotEntry++ ) {
734             size_t entries = get_camera_metadata_entry_count(*slotEntry);
735             size_t dataBytes = get_camera_metadata_data_count(*slotEntry);
736 
737             camera_metadata_t *copy =
738                     allocate_camera_metadata(entries, dataBytes);
739             append_camera_metadata(copy, *slotEntry);
740             mEntries.push_back(copy);
741         }
742         mCount = mStreamSlotCount;
743     }
744     ALOGVV("MetadataQueue: deque (%d buffers)", mCount);
745     camera_metadata_t *b = *(mEntries.begin());
746     mEntries.erase(mEntries.begin());
747 
748     if (incrementCount) {
749         ATRACE_INT("cam2_request", mFrameCount);
750         camera_metadata_entry_t frameCount;
751         res = find_camera_metadata_entry(b,
752                 ANDROID_REQUEST_FRAME_COUNT,
753                 &frameCount);
754         if (res != OK) {
755             ALOGE("%s: Unable to add frame count: %s (%d)",
756                     __FUNCTION__, strerror(-res), res);
757         } else {
758             *frameCount.data.i32 = mFrameCount;
759         }
760         mFrameCount++;
761     }
762 
763     // Check for request ID, and if present, signal waiters.
764     camera_metadata_entry_t requestId;
765     res = find_camera_metadata_entry(b,
766             ANDROID_REQUEST_ID,
767             &requestId);
768     if (res == OK) {
769         mLatestRequestId = requestId.data.i32[0];
770         mNewRequestId.signal();
771     }
772 
773     *buf = b;
774     mCount--;
775 
776     return OK;
777 }
778 
779 status_t Camera2Device::MetadataQueue::waitForBuffer(nsecs_t timeout)
780 {
781     Mutex::Autolock l(mMutex);
782     status_t res;
783     while (mCount == 0) {
784         res = notEmpty.waitRelative(mMutex,timeout);
785         if (res != OK) return res;
786     }
787     return OK;
788 }
789 
790 status_t Camera2Device::MetadataQueue::waitForDequeue(int32_t id,
791         nsecs_t timeout) {
792     Mutex::Autolock l(mMutex);
793     status_t res;
794     while (mLatestRequestId != id) {
795         nsecs_t startTime = systemTime();
796 
797         res = mNewRequestId.waitRelative(mMutex, timeout);
798         if (res != OK) return res;
799 
800         timeout -= (systemTime() - startTime);
801     }
802 
803     return OK;
804 }
805 
806 status_t Camera2Device::MetadataQueue::setStreamSlot(camera_metadata_t *buf)
807 {
808     ATRACE_CALL();
809     ALOGV("%s: E", __FUNCTION__);
810     Mutex::Autolock l(mMutex);
811     if (buf == NULL) {
812         freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
813         mStreamSlotCount = 0;
814         return OK;
815     }
816 
817     if (mStreamSlotCount > 1) {
818         List<camera_metadata_t*>::iterator deleter = ++mStreamSlot.begin();
819         freeBuffers(++mStreamSlot.begin(), mStreamSlot.end());
820         mStreamSlotCount = 1;
821     }
822     if (mStreamSlotCount == 1) {
823         free_camera_metadata( *(mStreamSlot.begin()) );
824         *(mStreamSlot.begin()) = buf;
825     } else {
826         mStreamSlot.push_front(buf);
827         mStreamSlotCount = 1;
828     }
829     return signalConsumerLocked();
830 }
831 
832 status_t Camera2Device::MetadataQueue::setStreamSlot(
833         const List<camera_metadata_t*> &bufs)
834 {
835     ATRACE_CALL();
836     ALOGV("%s: E", __FUNCTION__);
837     Mutex::Autolock l(mMutex);
838 
839     if (mStreamSlotCount > 0) {
840         freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
841     }
842     mStreamSlotCount = 0;
843     for (List<camera_metadata_t*>::const_iterator r = bufs.begin();
844          r != bufs.end(); r++) {
845         mStreamSlot.push_back(*r);
846         mStreamSlotCount++;
847     }
848     return signalConsumerLocked();
849 }
850 
851 status_t Camera2Device::MetadataQueue::clear()
852 {
853     ATRACE_CALL();
854     ALOGV("%s: E", __FUNCTION__);
855 
856     Mutex::Autolock l(mMutex);
857 
858     // Clear streaming slot
859     freeBuffers(mStreamSlot.begin(), mStreamSlot.end());
860     mStreamSlotCount = 0;
861 
862     // Clear request queue
863     freeBuffers(mEntries.begin(), mEntries.end());
864     mCount = 0;
865     return OK;
866 }
867 
868 status_t Camera2Device::MetadataQueue::dump(int fd,
869         const Vector<String16>& /*args*/) {
870     ATRACE_CALL();
871     String8 result;
872     status_t notLocked;
873     notLocked = mMutex.tryLock();
874     if (notLocked) {
875         result.append("    (Unable to lock queue mutex)\n");
876     }
877     result.appendFormat("      Current frame number: %d\n", mFrameCount);
878     if (mStreamSlotCount == 0) {
879         result.append("      Stream slot: Empty\n");
880         write(fd, result.string(), result.size());
881     } else {
882         result.appendFormat("      Stream slot: %zu entries\n",
883                 mStreamSlot.size());
884         int i = 0;
885         for (List<camera_metadata_t*>::iterator r = mStreamSlot.begin();
886              r != mStreamSlot.end(); r++) {
887             result = String8::format("       Stream slot buffer %d:\n", i);
888             write(fd, result.string(), result.size());
889             dump_indented_camera_metadata(*r, fd, 2, 10);
890             i++;
891         }
892     }
893     if (mEntries.size() == 0) {
894         result = "      Main queue is empty\n";
895         write(fd, result.string(), result.size());
896     } else {
897         result = String8::format("      Main queue has %zu entries:\n",
898                 mEntries.size());
899         int i = 0;
900         for (List<camera_metadata_t*>::iterator r = mEntries.begin();
901              r != mEntries.end(); r++) {
902             result = String8::format("       Queue entry %d:\n", i);
903             write(fd, result.string(), result.size());
904             dump_indented_camera_metadata(*r, fd, 2, 10);
905             i++;
906         }
907     }
908 
909     if (notLocked == 0) {
910         mMutex.unlock();
911     }
912 
913     return OK;
914 }
915 
916 status_t Camera2Device::MetadataQueue::signalConsumerLocked() {
917     ATRACE_CALL();
918     status_t res = OK;
919     notEmpty.signal();
920     if (mSignalConsumer && mHal2Device != NULL) {
921         mSignalConsumer = false;
922 
923         mMutex.unlock();
924         ALOGV("%s: Signaling consumer", __FUNCTION__);
925         res = mHal2Device->ops->notify_request_queue_not_empty(mHal2Device);
926         mMutex.lock();
927     }
928     return res;
929 }
930 
931 status_t Camera2Device::MetadataQueue::freeBuffers(
932         List<camera_metadata_t*>::iterator start,
933         List<camera_metadata_t*>::iterator end)
934 {
935     ATRACE_CALL();
936     while (start != end) {
937         free_camera_metadata(*start);
938         start = mStreamSlot.erase(start);
939     }
940     return OK;
941 }
942 
943 Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance(
944         const camera2_request_queue_src_ops_t *q)
945 {
946     const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q);
947     return const_cast<MetadataQueue*>(cmq);
948 }
949 
950 Camera2Device::MetadataQueue* Camera2Device::MetadataQueue::getInstance(
951         const camera2_frame_queue_dst_ops_t *q)
952 {
953     const MetadataQueue* cmq = static_cast<const MetadataQueue*>(q);
954     return const_cast<MetadataQueue*>(cmq);
955 }
956 
957 int Camera2Device::MetadataQueue::consumer_buffer_count(
958         const camera2_request_queue_src_ops_t *q)
959 {
960     MetadataQueue *queue = getInstance(q);
961     return queue->getBufferCount();
962 }
963 
964 int Camera2Device::MetadataQueue::consumer_dequeue(
965         const camera2_request_queue_src_ops_t *q,
966         camera_metadata_t **buffer)
967 {
968     MetadataQueue *queue = getInstance(q);
969     return queue->dequeue(buffer, true);
970 }
971 
972 int Camera2Device::MetadataQueue::consumer_free(
973         const camera2_request_queue_src_ops_t *q,
974         camera_metadata_t *old_buffer)
975 {
976     ATRACE_CALL();
977     MetadataQueue *queue = getInstance(q);
978     (void)queue;
979     free_camera_metadata(old_buffer);
980     return OK;
981 }
982 
983 int Camera2Device::MetadataQueue::producer_dequeue(
984         const camera2_frame_queue_dst_ops_t * /*q*/,
985         size_t entries, size_t bytes,
986         camera_metadata_t **buffer)
987 {
988     ATRACE_CALL();
989     camera_metadata_t *new_buffer =
990             allocate_camera_metadata(entries, bytes);
991     if (new_buffer == NULL) return NO_MEMORY;
992     *buffer = new_buffer;
993     return OK;
994 }
995 
996 int Camera2Device::MetadataQueue::producer_cancel(
997         const camera2_frame_queue_dst_ops_t * /*q*/,
998         camera_metadata_t *old_buffer)
999 {
1000     ATRACE_CALL();
1001     free_camera_metadata(old_buffer);
1002     return OK;
1003 }
1004 
1005 int Camera2Device::MetadataQueue::producer_enqueue(
1006         const camera2_frame_queue_dst_ops_t *q,
1007         camera_metadata_t *filled_buffer)
1008 {
1009     MetadataQueue *queue = getInstance(q);
1010     return queue->enqueue(filled_buffer);
1011 }
1012 
1013 /**
1014  * Camera2Device::StreamAdapter
1015  */
1016 
1017 #ifndef container_of
1018 #define container_of(ptr, type, member) \
1019     (type *)((char*)(ptr) - offsetof(type, member))
1020 #endif
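// container_of recovers a pointer to an enclosing struct from a pointer to
// one of its members by subtracting the member's byte offset (same idea as
// the Linux kernel macro of that name). enqueue_buffer() and cancel_buffer()
// below rely on it to get back the ANativeWindowBuffer that owns a given
// buffer_handle_t, since dequeue_buffer() only ever hands the HAL the address
// of anb->handle. Illustrative use:
//
//   buffer_handle_t *handle = ...;  // actually &anb->handle
//   ANativeWindowBuffer *anb =
//           container_of(handle, ANativeWindowBuffer, handle);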
1021 
1022 Camera2Device::StreamAdapter::StreamAdapter(camera2_device_t *d):
1023         mState(RELEASED),
1024         mHal2Device(d),
1025         mId(-1),
1026         mWidth(0), mHeight(0), mFormat(0), mSize(0), mUsage(0),
1027         mMaxProducerBuffers(0), mMaxConsumerBuffers(0),
1028         mTotalBuffers(0),
1029         mFormatRequested(0),
1030         mActiveBuffers(0),
1031         mFrameCount(0),
1032         mLastTimestamp(0)
1033 {
1034     camera2_stream_ops::dequeue_buffer = dequeue_buffer;
1035     camera2_stream_ops::enqueue_buffer = enqueue_buffer;
1036     camera2_stream_ops::cancel_buffer = cancel_buffer;
1037     camera2_stream_ops::set_crop = set_crop;
1038 }
1039 
1040 Camera2Device::StreamAdapter::~StreamAdapter() {
1041     ATRACE_CALL();
1042     if (mState != RELEASED) {
1043         release();
1044     }
1045 }
1046 
1047 status_t Camera2Device::StreamAdapter::connectToDevice(
1048         sp<ANativeWindow> consumer,
1049         uint32_t width, uint32_t height, int format, size_t size) {
1050     ATRACE_CALL();
1051     status_t res;
1052     ALOGV("%s: E", __FUNCTION__);
1053 
1054     if (mState != RELEASED) return INVALID_OPERATION;
1055     if (consumer == NULL) {
1056         ALOGE("%s: Null consumer passed to stream adapter", __FUNCTION__);
1057         return BAD_VALUE;
1058     }
1059 
1060     ALOGV("%s: New stream parameters %d x %d, format 0x%x, size %zu",
1061             __FUNCTION__, width, height, format, size);
1062 
1063     mConsumerInterface = consumer;
1064     mWidth = width;
1065     mHeight = height;
1066     mSize = (format == HAL_PIXEL_FORMAT_BLOB) ? size : 0;
1067     mFormatRequested = format;
1068 
1069     // Allocate device-side stream interface
1070 
1071     uint32_t id;
1072     uint32_t formatActual;
1073     uint32_t usage;
1074     uint32_t maxBuffers = 2;
1075     res = mHal2Device->ops->allocate_stream(mHal2Device,
1076             mWidth, mHeight, mFormatRequested, getStreamOps(),
1077             &id, &formatActual, &usage, &maxBuffers);
1078     if (res != OK) {
1079         ALOGE("%s: Device stream allocation failed: %s (%d)",
1080                 __FUNCTION__, strerror(-res), res);
1081         return res;
1082     }
1083 
1084     ALOGV("%s: Allocated stream id %d, actual format 0x%x, "
1085             "usage 0x%x, producer wants %d buffers", __FUNCTION__,
1086             id, formatActual, usage, maxBuffers);
1087 
1088     mId = id;
1089     mFormat = formatActual;
1090     mUsage = usage;
1091     mMaxProducerBuffers = maxBuffers;
1092 
1093     mState = ALLOCATED;
1094 
1095     // Configure consumer-side ANativeWindow interface
1096     res = native_window_api_connect(mConsumerInterface.get(),
1097             NATIVE_WINDOW_API_CAMERA);
1098     if (res != OK) {
1099         ALOGE("%s: Unable to connect to native window for stream %d",
1100                 __FUNCTION__, mId);
1101 
1102         return res;
1103     }
1104 
1105     mState = CONNECTED;
1106 
1107     res = native_window_set_usage(mConsumerInterface.get(), mUsage);
1108     if (res != OK) {
1109         ALOGE("%s: Unable to configure usage %08x for stream %d",
1110                 __FUNCTION__, mUsage, mId);
1111         return res;
1112     }
1113 
1114     res = native_window_set_scaling_mode(mConsumerInterface.get(),
1115             NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
1116     if (res != OK) {
1117         ALOGE("%s: Unable to configure stream scaling: %s (%d)",
1118                 __FUNCTION__, strerror(-res), res);
1119         return res;
1120     }
1121 
1122     res = setTransform(0);
1123     if (res != OK) {
1124         return res;
1125     }
1126 
1127     if (mFormat == HAL_PIXEL_FORMAT_BLOB) {
1128         res = native_window_set_buffers_dimensions(mConsumerInterface.get(),
1129                 mSize, 1);
1130         if (res != OK) {
1131             ALOGE("%s: Unable to configure compressed stream buffer dimensions"
1132                     " %d x %d, size %zu for stream %d",
1133                     __FUNCTION__, mWidth, mHeight, mSize, mId);
1134             return res;
1135         }
1136     } else {
1137         res = native_window_set_buffers_dimensions(mConsumerInterface.get(),
1138                 mWidth, mHeight);
1139         if (res != OK) {
1140             ALOGE("%s: Unable to configure stream buffer dimensions"
1141                     " %d x %d for stream %d",
1142                     __FUNCTION__, mWidth, mHeight, mId);
1143             return res;
1144         }
1145     }
1146 
1147     res = native_window_set_buffers_format(mConsumerInterface.get(), mFormat);
1148     if (res != OK) {
1149         ALOGE("%s: Unable to configure stream buffer format"
1150                 " %#x for stream %d",
1151                 __FUNCTION__, mFormat, mId);
1152         return res;
1153     }
1154 
1155     int maxConsumerBuffers;
1156     res = mConsumerInterface->query(mConsumerInterface.get(),
1157             NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &maxConsumerBuffers);
1158     if (res != OK) {
1159         ALOGE("%s: Unable to query consumer undequeued"
1160                 " buffer count for stream %d", __FUNCTION__, mId);
1161         return res;
1162     }
1163     mMaxConsumerBuffers = maxConsumerBuffers;
1164 
1165     ALOGV("%s: Consumer wants %d buffers", __FUNCTION__,
1166             mMaxConsumerBuffers);
1167 
1168     mTotalBuffers = mMaxConsumerBuffers + mMaxProducerBuffers;
1169     mActiveBuffers = 0;
1170     mFrameCount = 0;
1171     mLastTimestamp = 0;
1172 
1173     res = native_window_set_buffer_count(mConsumerInterface.get(),
1174             mTotalBuffers);
1175     if (res != OK) {
1176         ALOGE("%s: Unable to set buffer count for stream %d",
1177                 __FUNCTION__, mId);
1178         return res;
1179     }
1180 
1181     // Register allocated buffers with HAL device
1182     buffer_handle_t *buffers = new buffer_handle_t[mTotalBuffers];
1183     ANativeWindowBuffer **anwBuffers = new ANativeWindowBuffer*[mTotalBuffers];
1184     uint32_t bufferIdx = 0;
1185     for (; bufferIdx < mTotalBuffers; bufferIdx++) {
1186         res = native_window_dequeue_buffer_and_wait(mConsumerInterface.get(),
1187                 &anwBuffers[bufferIdx]);
1188         if (res != OK) {
1189             ALOGE("%s: Unable to dequeue buffer %d for initial registration for "
1190                     "stream %d", __FUNCTION__, bufferIdx, mId);
1191             goto cleanUpBuffers;
1192         }
1193 
1194         buffers[bufferIdx] = anwBuffers[bufferIdx]->handle;
1195         ALOGV("%s: Buffer %p allocated", __FUNCTION__, (void*)buffers[bufferIdx]);
1196     }
1197 
1198     ALOGV("%s: Registering %d buffers with camera HAL", __FUNCTION__, mTotalBuffers);
1199     res = mHal2Device->ops->register_stream_buffers(mHal2Device,
1200             mId,
1201             mTotalBuffers,
1202             buffers);
1203     if (res != OK) {
1204         ALOGE("%s: Unable to register buffers with HAL device for stream %d",
1205                 __FUNCTION__, mId);
1206     } else {
1207         mState = ACTIVE;
1208     }
1209 
1210 cleanUpBuffers:
1211     ALOGV("%s: Cleaning up %d buffers", __FUNCTION__, bufferIdx);
1212     for (uint32_t i = 0; i < bufferIdx; i++) {
1213         res = mConsumerInterface->cancelBuffer(mConsumerInterface.get(),
1214                 anwBuffers[i], -1);
1215         if (res != OK) {
1216             ALOGE("%s: Unable to cancel buffer %d after registration",
1217                     __FUNCTION__, i);
1218         }
1219     }
1220     delete[] anwBuffers;
1221     delete[] buffers;
1222 
1223     return res;
1224 }
1225 
1226 status_t Camera2Device::StreamAdapter::release() {
1227     ATRACE_CALL();
1228     status_t res;
1229     ALOGV("%s: Releasing stream %d (%d x %d, format %d)", __FUNCTION__, mId,
1230             mWidth, mHeight, mFormat);
1231     if (mState >= ALLOCATED) {
1232         res = mHal2Device->ops->release_stream(mHal2Device, mId);
1233         if (res != OK) {
1234             ALOGE("%s: Unable to release stream %d",
1235                     __FUNCTION__, mId);
1236             return res;
1237         }
1238     }
1239     if (mState >= CONNECTED) {
1240         res = native_window_api_disconnect(mConsumerInterface.get(),
1241                 NATIVE_WINDOW_API_CAMERA);
1242 
1243         /* this is not an error. if client calling process dies,
1244            the window will also die and all calls to it will return
1245            DEAD_OBJECT, thus it's already "disconnected" */
1246         if (res == DEAD_OBJECT) {
1247             ALOGW("%s: While disconnecting stream %d from native window, the"
1248                   " native window died from under us", __FUNCTION__, mId);
1249         }
1250         else if (res != OK) {
1251             ALOGE("%s: Unable to disconnect stream %d from native window (error %d %s)",
1252                     __FUNCTION__, mId, res, strerror(-res));
1253             return res;
1254         }
1255     }
1256     mId = -1;
1257     mState = RELEASED;
1258     return OK;
1259 }
1260 
1261 status_t Camera2Device::StreamAdapter::setTransform(int transform) {
1262     ATRACE_CALL();
1263     status_t res;
1264     if (mState < CONNECTED) {
1265         ALOGE("%s: Cannot set transform on unconnected stream", __FUNCTION__);
1266         return INVALID_OPERATION;
1267     }
1268     res = native_window_set_buffers_transform(mConsumerInterface.get(),
1269                                               transform);
1270     if (res != OK) {
1271         ALOGE("%s: Unable to configure stream transform to %x: %s (%d)",
1272                 __FUNCTION__, transform, strerror(-res), res);
1273     }
1274     return res;
1275 }
1276 
1277 status_t Camera2Device::StreamAdapter::dump(int fd,
1278         const Vector<String16>& /*args*/) {
1279     ATRACE_CALL();
1280     String8 result = String8::format("      Stream %d: %d x %d, format 0x%x\n",
1281             mId, mWidth, mHeight, mFormat);
1282     result.appendFormat("        size %zu, usage 0x%x, requested format 0x%x\n",
1283             mSize, mUsage, mFormatRequested);
1284     result.appendFormat("        total buffers: %d, dequeued buffers: %d\n",
1285             mTotalBuffers, mActiveBuffers);
1286     result.appendFormat("        frame count: %d, last timestamp %" PRId64 "\n",
1287             mFrameCount, mLastTimestamp);
1288     write(fd, result.string(), result.size());
1289     return OK;
1290 }
1291 
1292 const camera2_stream_ops *Camera2Device::StreamAdapter::getStreamOps() {
1293     return static_cast<camera2_stream_ops *>(this);
1294 }
1295 
1296 ANativeWindow* Camera2Device::StreamAdapter::toANW(
1297         const camera2_stream_ops_t *w) {
1298     return static_cast<const StreamAdapter*>(w)->mConsumerInterface.get();
1299 }
1300 
1301 int Camera2Device::StreamAdapter::dequeue_buffer(const camera2_stream_ops_t *w,
1302         buffer_handle_t** buffer) {
1303     ATRACE_CALL();
1304     int res;
1305     StreamAdapter* stream =
1306             const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
1307     if (stream->mState != ACTIVE) {
1308         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
1309         return INVALID_OPERATION;
1310     }
1311 
1312     ANativeWindow *a = toANW(w);
1313     ANativeWindowBuffer* anb;
1314     res = native_window_dequeue_buffer_and_wait(a, &anb);
1315     if (res != OK) {
1316         ALOGE("Stream %d dequeue: Error from native_window: %s (%d)", stream->mId,
1317                 strerror(-res), res);
1318         return res;
1319     }
1320 
1321     *buffer = &(anb->handle);
1322     stream->mActiveBuffers++;
1323 
1324     ALOGVV("Stream %d dequeue: Buffer %p dequeued", stream->mId, (void*)(**buffer));
1325     return res;
1326 }
1327 
1328 int Camera2Device::StreamAdapter::enqueue_buffer(const camera2_stream_ops_t* w,
1329         int64_t timestamp,
1330         buffer_handle_t* buffer) {
1331     ATRACE_CALL();
1332     StreamAdapter *stream =
1333             const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
1334     stream->mFrameCount++;
1335     ALOGVV("Stream %d enqueue: Frame %d (%p) captured at %lld ns",
1336             stream->mId, stream->mFrameCount, (void*)(*buffer), timestamp);
1337     int state = stream->mState;
1338     if (state != ACTIVE) {
1339         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
1340         return INVALID_OPERATION;
1341     }
1342     ANativeWindow *a = toANW(w);
1343     status_t err;
1344 
1345     err = native_window_set_buffers_timestamp(a, timestamp);
1346     if (err != OK) {
1347         ALOGE("%s: Error setting timestamp on native window: %s (%d)",
1348                 __FUNCTION__, strerror(-err), err);
1349         return err;
1350     }
1351     err = a->queueBuffer(a,
1352             container_of(buffer, ANativeWindowBuffer, handle), -1);
1353     if (err != OK) {
1354         ALOGE("%s: Error queueing buffer to native window: %s (%d)",
1355                 __FUNCTION__, strerror(-err), err);
1356         return err;
1357     }
1358 
1359     stream->mActiveBuffers--;
1360     stream->mLastTimestamp = timestamp;
1361     return OK;
1362 }
1363 
1364 int Camera2Device::StreamAdapter::cancel_buffer(const camera2_stream_ops_t* w,
1365         buffer_handle_t* buffer) {
1366     ATRACE_CALL();
1367     StreamAdapter *stream =
1368             const_cast<StreamAdapter*>(static_cast<const StreamAdapter*>(w));
1369     ALOGVV("Stream %d cancel: Buffer %p",
1370             stream->mId, (void*)(*buffer));
1371     if (stream->mState != ACTIVE) {
1372         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
1373         return INVALID_OPERATION;
1374     }
1375 
1376     ANativeWindow *a = toANW(w);
1377     int err = a->cancelBuffer(a,
1378             container_of(buffer, ANativeWindowBuffer, handle), -1);
1379     if (err != OK) {
1380         ALOGE("%s: Error canceling buffer to native window: %s (%d)",
1381                 __FUNCTION__, strerror(-err), err);
1382         return err;
1383     }
1384 
1385     stream->mActiveBuffers--;
1386     return OK;
1387 }
1388 
1389 int Camera2Device::StreamAdapter::set_crop(const camera2_stream_ops_t* w,
1390         int left, int top, int right, int bottom) {
1391     ATRACE_CALL();
1392     int state = static_cast<const StreamAdapter*>(w)->mState;
1393     if (state != ACTIVE) {
1394         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
1395         return INVALID_OPERATION;
1396     }
1397     ANativeWindow *a = toANW(w);
1398     android_native_rect_t crop = { left, top, right, bottom };
1399     return native_window_set_crop(a, &crop);
1400 }
1401 
1402 /**
1403  * Camera2Device::ReprocessStreamAdapter
1404  */
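// ReprocessStreamAdapter feeds already-captured buffers back into the HAL:
// the framework queues a gralloc handle with pushIntoStream(), the HAL pulls
// it through the camera2_stream_in_ops acquire_buffer() callback (which moves
// the entry to the in-flight list), and release_buffer() returns it by firing
// the BufferReleasedListener registered with that buffer. release() drains
// both lists so no listener is left waiting when the stream is torn down.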
1405 
1406 #ifndef container_of
1407 #define container_of(ptr, type, member) \
1408     (type *)((char*)(ptr) - offsetof(type, member))
1409 #endif
1410 
1411 Camera2Device::ReprocessStreamAdapter::ReprocessStreamAdapter(camera2_device_t *d):
1412         mState(RELEASED),
1413         mHal2Device(d),
1414         mId(-1),
1415         mWidth(0), mHeight(0), mFormat(0),
1416         mActiveBuffers(0),
1417         mFrameCount(0)
1418 {
1419     ATRACE_CALL();
1420     camera2_stream_in_ops::acquire_buffer = acquire_buffer;
1421     camera2_stream_in_ops::release_buffer = release_buffer;
1422 }
1423 
1424 Camera2Device::ReprocessStreamAdapter::~ReprocessStreamAdapter() {
1425     ATRACE_CALL();
1426     if (mState != RELEASED) {
1427         release();
1428     }
1429 }
1430 
1431 status_t Camera2Device::ReprocessStreamAdapter::connectToDevice(
1432         const sp<StreamAdapter> &outputStream) {
1433     ATRACE_CALL();
1434     status_t res;
1435     ALOGV("%s: E", __FUNCTION__);
1436 
1437     if (mState != RELEASED) return INVALID_OPERATION;
1438     if (outputStream == NULL) {
1439         ALOGE("%s: Null base stream passed to reprocess stream adapter",
1440                 __FUNCTION__);
1441         return BAD_VALUE;
1442     }
1443 
1444     mBaseStream = outputStream;
1445     mWidth = outputStream->getWidth();
1446     mHeight = outputStream->getHeight();
1447     mFormat = outputStream->getFormat();
1448 
1449     ALOGV("%s: New reprocess stream parameters %d x %d, format 0x%x",
1450             __FUNCTION__, mWidth, mHeight, mFormat);
1451 
1452     // Allocate device-side stream interface
1453 
1454     uint32_t id;
1455     res = mHal2Device->ops->allocate_reprocess_stream_from_stream(mHal2Device,
1456             outputStream->getId(), getStreamOps(),
1457             &id);
1458     if (res != OK) {
1459         ALOGE("%s: Device reprocess stream allocation failed: %s (%d)",
1460                 __FUNCTION__, strerror(-res), res);
1461         return res;
1462     }
1463 
1464     ALOGV("%s: Allocated reprocess stream id %d based on stream %d",
1465             __FUNCTION__, id, outputStream->getId());
1466 
1467     mId = id;
1468 
1469     mState = ACTIVE;
1470 
1471     return OK;
1472 }
1473 
1474 status_t Camera2Device::ReprocessStreamAdapter::release() {
1475     ATRACE_CALL();
1476     status_t res;
1477     ALOGV("%s: Releasing stream %d", __FUNCTION__, mId);
1478     if (mState >= ACTIVE) {
1479         res = mHal2Device->ops->release_reprocess_stream(mHal2Device, mId);
1480         if (res != OK) {
1481             ALOGE("%s: Unable to release stream %d",
1482                     __FUNCTION__, mId);
1483             return res;
1484         }
1485     }
1486 
1487     List<QueueEntry>::iterator s;
1488     for (s = mQueue.begin(); s != mQueue.end(); s++) {
1489         sp<BufferReleasedListener> listener = s->releaseListener.promote();
1490         if (listener != 0) listener->onBufferReleased(s->handle);
1491     }
1492     for (s = mInFlightQueue.begin(); s != mInFlightQueue.end(); s++) {
1493         sp<BufferReleasedListener> listener = s->releaseListener.promote();
1494         if (listener != 0) listener->onBufferReleased(s->handle);
1495     }
1496     mQueue.clear();
1497     mInFlightQueue.clear();
1498 
1499     mState = RELEASED;
1500     return OK;
1501 }
1502 
1503 status_t Camera2Device::ReprocessStreamAdapter::pushIntoStream(
1504     buffer_handle_t *handle, const wp<BufferReleasedListener> &releaseListener) {
1505     ATRACE_CALL();
1506     // TODO: Some error checking here would be nice
1507     ALOGV("%s: Pushing buffer %p to stream", __FUNCTION__, (void*)(*handle));
1508 
1509     QueueEntry entry;
1510     entry.handle = handle;
1511     entry.releaseListener = releaseListener;
1512     mQueue.push_back(entry);
1513     return OK;
1514 }
1515 
1516 status_t Camera2Device::ReprocessStreamAdapter::dump(int fd,
1517         const Vector<String16>& /*args*/) {
1518     ATRACE_CALL();
1519     String8 result =
1520             String8::format("      Reprocess stream %d: %d x %d, fmt 0x%x\n",
1521                     mId, mWidth, mHeight, mFormat);
1522     result.appendFormat("        acquired buffers: %d\n",
1523             mActiveBuffers);
1524     result.appendFormat("        frame count: %d\n",
1525             mFrameCount);
1526     write(fd, result.string(), result.size());
1527     return OK;
1528 }
1529 
1530 const camera2_stream_in_ops *Camera2Device::ReprocessStreamAdapter::getStreamOps() {
1531     return static_cast<camera2_stream_in_ops *>(this);
1532 }
1533 
1534 int Camera2Device::ReprocessStreamAdapter::acquire_buffer(
1535     const camera2_stream_in_ops_t *w,
1536         buffer_handle_t** buffer) {
1537     ATRACE_CALL();
1538 
1539     ReprocessStreamAdapter* stream =
1540             const_cast<ReprocessStreamAdapter*>(
1541                 static_cast<const ReprocessStreamAdapter*>(w));
1542     if (stream->mState != ACTIVE) {
1543         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, stream->mState);
1544         return INVALID_OPERATION;
1545     }
1546 
1547     if (stream->mQueue.empty()) {
1548         *buffer = NULL;
1549         return OK;
1550     }
1551 
1552     QueueEntry &entry = *(stream->mQueue.begin());
1553 
1554     *buffer = entry.handle;
1555 
1556     stream->mInFlightQueue.push_back(entry);
1557     stream->mQueue.erase(stream->mQueue.begin());
1558 
1559     stream->mActiveBuffers++;
1560 
1561     ALOGV("Stream %d acquire: Buffer %p acquired", stream->mId,
1562             (void*)(**buffer));
1563     return OK;
1564 }
1565 
1566 int Camera2Device::ReprocessStreamAdapter::release_buffer(
1567     const camera2_stream_in_ops_t* w,
1568     buffer_handle_t* buffer) {
1569     ATRACE_CALL();
1570     ReprocessStreamAdapter *stream =
1571             const_cast<ReprocessStreamAdapter*>(
1572                 static_cast<const ReprocessStreamAdapter*>(w) );
1573     stream->mFrameCount++;
1574     ALOGV("Reprocess stream %d release: Frame %d (%p)",
1575             stream->mId, stream->mFrameCount, (void*)*buffer);
1576     int state = stream->mState;
1577     if (state != ACTIVE) {
1578         ALOGE("%s: Called when in bad state: %d", __FUNCTION__, state);
1579         return INVALID_OPERATION;
1580     }
1581     stream->mActiveBuffers--;
1582 
1583     List<QueueEntry>::iterator s;
1584     for (s = stream->mInFlightQueue.begin(); s != stream->mInFlightQueue.end(); s++) {
1585         if ( s->handle == buffer ) break;
1586     }
1587     if (s == stream->mInFlightQueue.end()) {
1588         ALOGE("%s: Can't find buffer %p in in-flight list!", __FUNCTION__,
1589                 buffer);
1590         return INVALID_OPERATION;
1591     }
1592 
1593     sp<BufferReleasedListener> listener = s->releaseListener.promote();
1594     if (listener != 0) {
1595         listener->onBufferReleased(s->handle);
1596     } else {
1597         ALOGE("%s: Can't free buffer - missing listener", __FUNCTION__);
1598     }
1599     stream->mInFlightQueue.erase(s);
1600 
1601     return OK;
1602 }
1603 
1604 // camera 2 devices don't support reprocessing
1605 status_t Camera2Device::createInputStream(
1606     uint32_t width, uint32_t height, int format, int *id) {
1607     ALOGE("%s: camera 2 devices don't support reprocessing", __FUNCTION__);
1608     return INVALID_OPERATION;
1609 }
1610 
1611 // camera 2 devices don't support reprocessing
1612 status_t Camera2Device::getInputBufferProducer(
1613         sp<IGraphicBufferProducer> *producer) {
1614     ALOGE("%s: camera 2 devices don't support reprocessing", __FUNCTION__);
1615     return INVALID_OPERATION;
1616 }
1617 
1618 }; // namespace android
1619