/*
 * Copyright (C) 2013-2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H
#define ANDROID_SERVERS_CAMERA3_OUTPUT_STREAM_H

#include <mutex>
#include <utils/RefBase.h>
#include <gui/IProducerListener.h>
#include <gui/Surface.h>
#include <gui/DisplayEventReceiver.h>

#include "utils/IPCTransport.h"
#include "utils/LatencyHistogram.h"
#include "Camera3Stream.h"
#include "Camera3IOStreamBase.h"
#include "Camera3OutputStreamInterface.h"
#include "Camera3BufferManager.h"
#include "PreviewFrameSpacer.h"

namespace android {

namespace camera3 {

class Camera3BufferManager;

/**
 * Stream info structure that holds the necessary stream info for the buffer manager to use for
 * buffer allocation and management.
 */
struct StreamInfo {
    int streamId;
    int streamSetId;
    uint32_t width;
    uint32_t height;
    uint32_t format;
    android_dataspace dataSpace;
    uint64_t combinedUsage;
    size_t totalBufferCount;
    bool isConfigured;
    bool isMultiRes;
    explicit StreamInfo(int id = CAMERA3_STREAM_ID_INVALID,
            int setId = CAMERA3_STREAM_SET_ID_INVALID,
            uint32_t w = 0,
            uint32_t h = 0,
            uint32_t fmt = 0,
            android_dataspace ds = HAL_DATASPACE_UNKNOWN,
            uint64_t usage = 0,
            size_t bufferCount = 0,
            bool configured = false,
            bool multiRes = false) :
                streamId(id),
                streamSetId(setId),
                width(w),
                height(h),
                format(fmt),
                dataSpace(ds),
                combinedUsage(usage),
                totalBufferCount(bufferCount),
                isConfigured(configured),
                isMultiRes(multiRes) {}
};
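
// A minimal usage sketch: a StreamInfo for a hypothetical 1080p preview stream
// might be filled in as
//
//     StreamInfo previewInfo(/*id*/ 0, /*setId*/ 1, /*w*/ 1920, /*h*/ 1080,
//             HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, HAL_DATASPACE_UNKNOWN,
//             GRALLOC_USAGE_HW_TEXTURE, /*bufferCount*/ 4);
//
// leaving isConfigured and isMultiRes at their defaults. The stream id, set id,
// pixel format, usage, and buffer count above are placeholder assumptions for
// illustration only; they are not constants defined in this header.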

/**
 * A class for managing a single stream of output data from the camera device.
 */
class Camera3OutputStream :
        public Camera3IOStreamBase,
        public Camera3OutputStreamInterface {
  public:
    /**
     * Set up a stream for formats that have 2 dimensions, such as RAW and YUV.
     * A valid stream set id needs to be set to support buffer sharing between multiple
     * streams.
     */
    Camera3OutputStream(int id, sp<Surface> consumer,
            uint32_t width, uint32_t height, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            nsecs_t timestampOffset, const String8& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    /**
     * Set up a stream for formats that have a variable buffer size for the same
     * dimensions, such as compressed JPEG.
     * A valid stream set id needs to be set to support buffer sharing between multiple
     * streams.
     */
    Camera3OutputStream(int id, sp<Surface> consumer,
            uint32_t width, uint32_t height, size_t maxSize, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            nsecs_t timestampOffset, const String8& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    /**
     * Set up a stream with deferred consumer for formats that have 2 dimensions, such as
     * RAW and YUV. The consumer must be set before using this stream for output. A valid
     * stream set id needs to be set to support buffer sharing between multiple streams.
     */
    Camera3OutputStream(int id, uint32_t width, uint32_t height, int format,
            uint64_t consumerUsage, android_dataspace dataSpace,
            camera_stream_rotation_t rotation, nsecs_t timestampOffset,
            const String8& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    virtual ~Camera3OutputStream();

    /**
     * Camera3Stream interface
     */

    virtual void dump(int fd, const Vector<String16> &args) const;

    /**
     * Set the transform on the output stream; one of the
     * HAL_TRANSFORM_* / NATIVE_WINDOW_TRANSFORM_* constants.
     */
    status_t setTransform(int transform, bool mayChangeMirror);

    /**
     * Return if this output stream is for video encoding.
     */
    bool isVideoStream() const;

    /**
     * Return if this output stream is consumed by hardware composer.
     */
    bool isConsumedByHWComposer() const;

    /**
     * Return if this output stream is consumed by hardware texture.
     */
    bool isConsumedByHWTexture() const;

    /**
     * Return if this output stream is consumed by CPU.
     */
    bool isConsumedByCPU() const;

    /**
     * Return if the consumer configuration of this stream is deferred.
     */
    virtual bool isConsumerConfigurationDeferred(size_t surface_id) const;

    /**
     * Set the consumer surfaces to the output stream.
     */
    virtual status_t setConsumers(const std::vector<sp<Surface>>& consumers);

    class BufferProducerListener : public SurfaceListener {
      public:
        BufferProducerListener(wp<Camera3OutputStream> parent, bool needsReleaseNotify)
                : mParent(parent), mNeedsReleaseNotify(needsReleaseNotify) {}

        /**
         * Implementation of IProducerListener, used to notify this stream that the consumer
         * has returned a buffer and it is ready to return to Camera3BufferManager for reuse.
         */
        virtual void onBufferReleased();
        virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
        virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);

      private:
        wp<Camera3OutputStream> mParent;
        bool mNeedsReleaseNotify;
    };

    virtual status_t detachBuffer(sp<GraphicBuffer>* buffer, int* fenceFd);

    /**
     * Notify that the buffer is being released to the buffer queue instead of
     * being queued to the consumer.
     */
    virtual status_t notifyBufferReleased(ANativeWindowBuffer *anwBuffer);

    /**
     * Drop buffers if dropping is true. If dropping is false, do not drop buffers.
     */
    virtual status_t dropBuffers(bool dropping) override;

    /**
     * Query the physical camera id for the output stream.
     */
    virtual const String8& getPhysicalCameraId() const override;

    /**
     * Set the graphic buffer manager to get/return the stream buffers.
     *
     * It is only legal to call this method when the stream is in STATE_CONSTRUCTED state.
     */
    status_t setBufferManager(sp<Camera3BufferManager> bufferManager);

    /**
     * Query the output surface id.
     */
    virtual ssize_t getSurfaceId(const sp<Surface> &/*surface*/) { return 0; }

    virtual status_t getUniqueSurfaceIds(const std::vector<size_t>&,
            /*out*/std::vector<size_t>*) { return INVALID_OPERATION; };

    /**
     * Update the stream output surfaces.
     */
    virtual status_t updateStream(const std::vector<sp<Surface>> &outputSurfaces,
            const std::vector<OutputStreamInfo> &outputInfo,
            const std::vector<size_t> &removedSurfaceIds,
            KeyedVector<sp<Surface>, size_t> *outputMap/*out*/);

    /**
     * Set the batch size for buffer operations. The output stream will request
     * buffers from the buffer queue on a batch basis. Currently only video streams
     * are allowed to set the batch size. Batching is also not supported when the
     * stream is managed by the buffer manager (Surface group in the Java API).
     * Changing the batch size on the fly while there are already batched
     * buffers in the stream is not supported either.
     * If the batch size is larger than the max dequeue count set
     * by the camera HAL, the batch size will be set to the max dequeue count
     * instead.
     */
    virtual status_t setBatchSize(size_t batchSize = 1) override;

    /**
     * Notify the stream on change of min frame durations or variable/fixed
     * frame rate.
     */
    virtual void onMinDurationChanged(nsecs_t duration, bool fixedFps) override;

    /**
     * Modify stream use case
     */
    virtual void setStreamUseCase(int64_t streamUseCase) override;

    /**
     * Apply ZSL related consumer usage quirk.
     */
    static void applyZSLUsageQuirk(int format, uint64_t *consumerUsage /*inout*/);

    void setImageDumpMask(int mask) { mImageDumpMask = mask; }
    bool shouldLogError(status_t res);
    void onCachedBufferQueued();

  protected:
    Camera3OutputStream(int id, camera_stream_type_t type,
            uint32_t width, uint32_t height, int format,
            android_dataspace dataSpace, camera_stream_rotation_t rotation,
            const String8& physicalCameraId,
            const std::unordered_set<int32_t> &sensorPixelModesUsed, IPCTransport transport,
            uint64_t consumerUsage = 0, nsecs_t timestampOffset = 0,
            int setId = CAMERA3_STREAM_SET_ID_INVALID, bool isMultiResolution = false,
            int64_t dynamicProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD,
            int64_t streamUseCase = ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT,
            bool deviceTimeBaseIsRealtime = false,
            int timestampBase = OutputConfiguration::TIMESTAMP_BASE_DEFAULT,
            int mirrorMode = OutputConfiguration::MIRROR_MODE_AUTO,
            int32_t colorSpace = ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED,
            bool useReadoutTimestamp = false);

    /**
     * Note that we release the lock briefly in this function
     */
    virtual status_t returnBufferCheckedLocked(
            const camera_stream_buffer &buffer,
            nsecs_t timestamp,
            nsecs_t readoutTimestamp,
            bool output,
            int32_t transform,
            const std::vector<size_t>& surface_ids,
            /*out*/
            sp<Fence> *releaseFenceOut);

    virtual status_t disconnectLocked();

    status_t fixUpHidlJpegBlobHeader(ANativeWindowBuffer* anwBuffer, int fence);

    status_t getEndpointUsageForSurface(uint64_t *usage,
            const sp<Surface>& surface) const;
    status_t configureConsumerQueueLocked(bool allowPreviewRespace);

    // Consumer as the output of camera HAL
    sp<Surface> mConsumer;

    uint64_t getPresetConsumerUsage() const { return mConsumerUsage; }

    static const nsecs_t kDequeueBufferTimeout = 1000000000; // 1 sec

    status_t getBufferLockedCommon(ANativeWindowBuffer** anb, int* fenceFd);

  private:

    int mTransform;

    virtual status_t setTransformLocked(int transform);

    bool mTraceFirstBuffer;

    // Name of Surface consumer
    String8 mConsumerName;

    /**
     * GraphicBuffer manager this stream is registered to. Used to replace the buffer
     * allocation/deallocation role of BufferQueue.
     */
    sp<Camera3BufferManager> mBufferManager;

    /**
     * Buffer producer listener, used to handle notification when a buffer is released
     * from consumer side, or a set of buffers are discarded by the consumer.
     */
    sp<BufferProducerListener> mBufferProducerListener;

    /**
     * Flag indicating if the buffer manager is used to allocate the stream buffers
     */
    bool mUseBufferManager;

    /**
     * Offset used to override camera HAL produced timestamps
     *
     * The offset is first initialized to bootTime - monotonicTime in the
     * constructor, and may later be updated based on the client's timestampBase
     * setting.
     */
    nsecs_t mTimestampOffset;

    /**
     * If camera readout time is used rather than the start-of-exposure time.
     */
    bool mUseReadoutTime;

    /**
     * Consumer end point usage flag set by the constructor for the deferred
     * consumer case.
     */
    uint64_t mConsumerUsage;

    // Whether to drop valid buffers.
    bool mDropBuffers;

    // The batch size for buffer operation
    std::atomic_size_t mBatchSize = 1;

    // Protecting batch states below, must be acquired after mLock
    std::mutex mBatchLock;
    // Prefetched buffers (ready to be handed to client)
    std::vector<Surface::BatchBuffer> mBatchedBuffers;
    // ---- End of mBatchLock protected scope ----

    const int mMirrorMode;

    /**
     * Internal Camera3Stream interface
     */
    virtual status_t getBufferLocked(camera_stream_buffer *buffer,
            const std::vector<size_t>& surface_ids);

    virtual status_t getBuffersLocked(/*out*/std::vector<OutstandingBuffer>* buffers) override;

    virtual status_t returnBufferLocked(
            const camera_stream_buffer &buffer,
            nsecs_t timestamp, nsecs_t readoutTimestamp,
            int32_t transform, const std::vector<size_t>& surface_ids);

    virtual status_t queueBufferToConsumer(sp<ANativeWindow>& consumer,
            ANativeWindowBuffer* buffer, int anwReleaseFence,
            const std::vector<size_t>& surface_ids);

    virtual status_t configureQueueLocked();

    virtual status_t getEndpointUsage(uint64_t *usage) const;

    /**
     * Private methods
     */
    void onBuffersRemovedLocked(const std::vector<sp<GraphicBuffer>>&);
    status_t detachBufferLocked(sp<GraphicBuffer>* buffer, int* fenceFd);

    // Call this after each dequeueBuffer/attachBuffer/detachNextBuffer call to get an update on
    // removed buffers. Set notifyBufferManager to false when the call is initiated by the buffer
    // manager so the buffer manager doesn't need to be notified.
    void checkRemovedBuffersLocked(bool notifyBufferManager = true);

    // Check return status of IGBP calls and set abandoned state accordingly
    void checkRetAndSetAbandonedLocked(status_t res);

    // If the status indicates an abandoned stream, only log when state hasn't been updated to
    // STATE_ABANDONED
    static bool shouldLogError(status_t res, StreamState state);

    // Dump images to disk before returning to consumer
    void dumpImageToDisk(nsecs_t timestamp, ANativeWindowBuffer* anwBuffer, int fence);

    void returnPrefetchedBuffersLocked();

    static const int32_t kDequeueLatencyBinSize = 5; // in ms
    CameraLatencyHistogram mDequeueBufferLatency;
    IPCTransport mIPCTransport = IPCTransport::INVALID;

    int mImageDumpMask = 0;

    // Re-space frames by overriding timestamp to align with display Vsync.
    // Default is on for SurfaceView bound streams.
    bool mFixedFps = false;
    nsecs_t mMinExpectedDuration = 0;
    bool mSyncToDisplay = false;
    DisplayEventReceiver mDisplayEventReceiver;
    nsecs_t mLastCaptureTime = 0;
    nsecs_t mLastPresentTime = 0;
    nsecs_t mCaptureToPresentOffset = 0;
    static constexpr size_t kDisplaySyncExtraBuffer = 2;
    static constexpr nsecs_t kSpacingResetIntervalNs = 50000000LL; // 50 milliseconds
    static constexpr nsecs_t kTimelineThresholdNs = 1000000LL; // 1 millisecond
    static constexpr float kMaxIntervalRatioDeviation = 0.05f;
    static constexpr int kMaxTimelines = 2;
    nsecs_t syncTimestampToDisplayLocked(nsecs_t t, sp<Fence> releaseFence);

    // In case of fence being used
    sp<Fence> mReferenceFrameFence;
    nsecs_t mReferenceCaptureTime = 0;
    nsecs_t mReferenceArrivalTime = 0;
    nsecs_t mFenceSignalOffset = 0;
    VsyncEventData mRefVsyncData;

    // Re-space frames by delaying queueBuffer so that frame delivery has
    // the same cadence as capture. Default is on for SurfaceTexture bound
    // streams.
    sp<PreviewFrameSpacer> mPreviewFrameSpacer;
}; // class Camera3OutputStream

} // namespace camera3

} // namespace android

#endif