// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "media/base/video_frame.h"

#include <algorithm>

#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/memory/aligned_memory.h"
#include "base/strings/string_piece.h"
#include "gpu/command_buffer/common/mailbox_holder.h"
#include "media/base/limits.h"
#include "media/base/video_util.h"

#if !defined(MEDIA_FOR_CAST_IOS)
#include "third_party/skia/include/core/SkBitmap.h"
#endif

namespace media {

static inline size_t RoundUp(size_t value, size_t alignment) {
  // Check that |alignment| is a power of 2.
  DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
  return ((value + (alignment - 1)) & ~(alignment - 1));
}

// Rounds up |coded_size| if necessary for |format|.
static gfx::Size AdjustCodedSize(VideoFrame::Format format,
                                 const gfx::Size& coded_size) {
  gfx::Size new_coded_size(coded_size);
  switch (format) {
    case VideoFrame::YV12:
    case VideoFrame::YV12A:
    case VideoFrame::I420:
    case VideoFrame::YV12J:
      new_coded_size.set_height(RoundUp(new_coded_size.height(), 2));
    // Fallthrough.
    case VideoFrame::YV16:
      new_coded_size.set_width(RoundUp(new_coded_size.width(), 2));
      break;
    default:
      break;
  }
  return new_coded_size;
}

// static
scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
    VideoFrame::Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp) {
  DCHECK(format != VideoFrame::UNKNOWN &&
         format != VideoFrame::NV12 &&
         format != VideoFrame::NATIVE_TEXTURE);
#if defined(VIDEO_HOLE)
  DCHECK(format != VideoFrame::HOLE);
#endif  // defined(VIDEO_HOLE)

  // Since we're creating a new YUV frame (and allocating memory for it
  // ourselves), we can pad the requested |coded_size| if necessary if the
  // request does not line up on sample boundaries.
  gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
  DCHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));

  scoped_refptr<VideoFrame> frame(
      new VideoFrame(format,
                     new_coded_size,
                     visible_rect,
                     natural_size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     timestamp,
                     false));
  frame->AllocateYUV();
  return frame;
}
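// Usage sketch (illustrative): allocating a zero-initialized 320x180 YV12
// frame whose coded, visible, and natural sizes all match. The frame owns
// its memory and frees it via |no_longer_needed_cb_| on destruction.
//
//   scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
//       VideoFrame::YV12, gfx::Size(320, 180), gfx::Rect(320, 180),
//       gfx::Size(320, 180), base::TimeDelta());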
// static
std::string VideoFrame::FormatToString(VideoFrame::Format format) {
  switch (format) {
    case VideoFrame::UNKNOWN:
      return "UNKNOWN";
    case VideoFrame::YV12:
      return "YV12";
    case VideoFrame::YV16:
      return "YV16";
    case VideoFrame::I420:
      return "I420";
    case VideoFrame::NATIVE_TEXTURE:
      return "NATIVE_TEXTURE";
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
      return "HOLE";
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::YV12A:
      return "YV12A";
    case VideoFrame::YV12J:
      return "YV12J";
    case VideoFrame::NV12:
      return "NV12";
    case VideoFrame::YV24:
      return "YV24";
  }
  NOTREACHED() << "Invalid videoframe format provided: " << format;
  return "";
}

// static
bool VideoFrame::IsValidConfig(VideoFrame::Format format,
                               const gfx::Size& coded_size,
                               const gfx::Rect& visible_rect,
                               const gfx::Size& natural_size) {
  // Check maximum limits for all formats.
  if (coded_size.GetArea() > limits::kMaxCanvas ||
      coded_size.width() > limits::kMaxDimension ||
      coded_size.height() > limits::kMaxDimension ||
      visible_rect.x() < 0 || visible_rect.y() < 0 ||
      visible_rect.right() > coded_size.width() ||
      visible_rect.bottom() > coded_size.height() ||
      natural_size.GetArea() > limits::kMaxCanvas ||
      natural_size.width() > limits::kMaxDimension ||
      natural_size.height() > limits::kMaxDimension)
    return false;

  // Check format-specific width/height requirements.
  switch (format) {
    case VideoFrame::UNKNOWN:
      return (coded_size.IsEmpty() && visible_rect.IsEmpty() &&
              natural_size.IsEmpty());
    case VideoFrame::YV24:
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV12J:
    case VideoFrame::I420:
    case VideoFrame::YV12A:
    case VideoFrame::NV12:
      // Subsampled YUV formats have width/height requirements.
      if (static_cast<size_t>(coded_size.height()) <
          RoundUp(visible_rect.bottom(), 2))
        return false;
    // Fallthrough.
    case VideoFrame::YV16:
      if (static_cast<size_t>(coded_size.width()) <
          RoundUp(visible_rect.right(), 2))
        return false;
      break;
    case VideoFrame::NATIVE_TEXTURE:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
      // NATIVE_TEXTURE and HOLE have no software-allocated buffers and are
      // allowed to skip the below check and be empty.
      return true;
  }

  // Check that software-allocated buffer formats are not empty.
  return (!coded_size.IsEmpty() && !visible_rect.IsEmpty() &&
          !natural_size.IsEmpty());
}

// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
    scoped_ptr<gpu::MailboxHolder> mailbox_holder,
    const ReleaseMailboxCB& mailbox_holder_release_cb,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    base::TimeDelta timestamp,
    const ReadPixelsCB& read_pixels_cb) {
  scoped_refptr<VideoFrame> frame(new VideoFrame(NATIVE_TEXTURE,
                                                 coded_size,
                                                 visible_rect,
                                                 natural_size,
                                                 mailbox_holder.Pass(),
                                                 timestamp,
                                                 false));
  frame->mailbox_holder_release_cb_ = mailbox_holder_release_cb;
  frame->read_pixels_cb_ = read_pixels_cb;
  return frame;
}

#if !defined(MEDIA_FOR_CAST_IOS)
void VideoFrame::ReadPixelsFromNativeTexture(const SkBitmap& pixels) {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  if (!read_pixels_cb_.is_null())
    read_pixels_cb_.Run(pixels);
}
#endif

// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    uint8* data,
    size_t data_size,
    base::SharedMemoryHandle handle,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);

  if (!IsValidConfig(format, new_coded_size, visible_rect, natural_size))
    return NULL;
  if (data_size < AllocationSize(format, new_coded_size))
    return NULL;

  switch (format) {
    case VideoFrame::I420: {
      scoped_refptr<VideoFrame> frame(
          new VideoFrame(format,
                         new_coded_size,
                         visible_rect,
                         natural_size,
                         scoped_ptr<gpu::MailboxHolder>(),
                         timestamp,
                         false));
      frame->shared_memory_handle_ = handle;
      frame->strides_[kYPlane] = new_coded_size.width();
      frame->strides_[kUPlane] = new_coded_size.width() / 2;
      frame->strides_[kVPlane] = new_coded_size.width() / 2;
      frame->data_[kYPlane] = data;
      frame->data_[kUPlane] = data + new_coded_size.GetArea();
      frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4);
      frame->no_longer_needed_cb_ = no_longer_needed_cb;
      return frame;
    }
    default:
      NOTIMPLEMENTED();
      return NULL;
  }
}
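// Layout sketch (illustrative) for the packed I420 case above, given a coded
// size of W x H: Y occupies bytes [0, W*H), U occupies [W*H, W*H*5/4), and V
// occupies [W*H*5/4, W*H*3/2), which is where the |data_| offsets assigned
// above come from. E.g. for W = 4, H = 2: Y at offset 0 (8 bytes), U at 8
// (2 bytes), V at 10 (2 bytes), 12 bytes total.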
#if defined(OS_POSIX)
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    const std::vector<int>& dmabuf_fds,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
    return NULL;

  if (dmabuf_fds.size() != NumPlanes(format)) {
    LOG(FATAL) << "Not enough dmabuf fds provided!";
    return NULL;
  }

  scoped_refptr<VideoFrame> frame(
      new VideoFrame(format,
                     coded_size,
                     visible_rect,
                     natural_size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     timestamp,
                     false));

  for (size_t i = 0; i < dmabuf_fds.size(); ++i) {
    int duped_fd = HANDLE_EINTR(dup(dmabuf_fds[i]));
    if (duped_fd == -1) {
      // The fds already duped in previous iterations will be closed when
      // the partially-created frame drops out of scope here.
      DLOG(ERROR) << "Failed duplicating a dmabuf fd";
      return NULL;
    }

    frame->dmabuf_fds_[i].reset(duped_fd);
    // Data is accessible only via fds.
    frame->data_[i] = NULL;
    frame->strides_[i] = 0;
  }

  frame->no_longer_needed_cb_ = no_longer_needed_cb;
  return frame;
}
#endif

#if defined(OS_MACOSX)
// static
scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
    CVPixelBufferRef cv_pixel_buffer,
    base::TimeDelta timestamp) {
  DCHECK(cv_pixel_buffer);
  DCHECK(CFGetTypeID(cv_pixel_buffer) == CVPixelBufferGetTypeID());

  OSType cv_format = CVPixelBufferGetPixelFormatType(cv_pixel_buffer);
  Format format;
  // There are very few compatible CV pixel formats, so just check each.
  if (cv_format == kCVPixelFormatType_420YpCbCr8Planar) {
    format = Format::I420;
  } else if (cv_format == kCVPixelFormatType_444YpCbCr8) {
    format = Format::YV24;
  } else if (cv_format == '420v') {
    // TODO(jfroy): Use kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange when
    // the minimum OS X and iOS SDKs permit it.
    format = Format::NV12;
  } else {
    DLOG(ERROR) << "CVPixelBuffer format not supported: " << cv_format;
    return NULL;
  }

  gfx::Size coded_size(CVImageBufferGetEncodedSize(cv_pixel_buffer));
  gfx::Rect visible_rect(CVImageBufferGetCleanRect(cv_pixel_buffer));
  gfx::Size natural_size(CVImageBufferGetDisplaySize(cv_pixel_buffer));

  if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
    return NULL;

  scoped_refptr<VideoFrame> frame(
      new VideoFrame(format,
                     coded_size,
                     visible_rect,
                     natural_size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     timestamp,
                     false));
  frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN);
  return frame;
}
#endif

// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
    Format format,
    const gfx::Size& coded_size,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    int32 y_stride,
    int32 u_stride,
    int32 v_stride,
    uint8* y_data,
    uint8* u_data,
    uint8* v_data,
    base::TimeDelta timestamp,
    const base::Closure& no_longer_needed_cb) {
  gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
  CHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));

  scoped_refptr<VideoFrame> frame(
      new VideoFrame(format,
                     new_coded_size,
                     visible_rect,
                     natural_size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     timestamp,
                     false));
  frame->strides_[kYPlane] = y_stride;
  frame->strides_[kUPlane] = u_stride;
  frame->strides_[kVPlane] = v_stride;
  frame->data_[kYPlane] = y_data;
  frame->data_[kUPlane] = u_data;
  frame->data_[kVPlane] = v_data;
  frame->no_longer_needed_cb_ = no_longer_needed_cb;
  return frame;
}
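// Usage sketch (illustrative; |buf|, |coded|, |timestamp|, and FreeBuffer
// are hypothetical): wrapping an externally owned, tightly packed I420
// buffer with WrapExternalYuvData() above.
//
//   scoped_refptr<VideoFrame> frame = VideoFrame::WrapExternalYuvData(
//       VideoFrame::I420, coded, gfx::Rect(coded), coded,
//       coded.width(), coded.width() / 2, coded.width() / 2,
//       buf, buf + coded.GetArea(), buf + coded.GetArea() * 5 / 4,
//       timestamp, base::Bind(&FreeBuffer, buf));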
// static
scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
    const scoped_refptr<VideoFrame>& frame,
    const gfx::Rect& visible_rect,
    const gfx::Size& natural_size,
    const base::Closure& no_longer_needed_cb) {
  // NATIVE_TEXTURE frames need mailbox info propagated, and there's no
  // support for that here yet, see http://crbug/362521.
  CHECK_NE(frame->format(), NATIVE_TEXTURE);

  DCHECK(frame->visible_rect().Contains(visible_rect));
  scoped_refptr<VideoFrame> wrapped_frame(
      new VideoFrame(frame->format(),
                     frame->coded_size(),
                     visible_rect,
                     natural_size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     frame->timestamp(),
                     frame->end_of_stream()));

  for (size_t i = 0; i < NumPlanes(frame->format()); ++i) {
    wrapped_frame->strides_[i] = frame->stride(i);
    wrapped_frame->data_[i] = frame->data(i);
  }

  wrapped_frame->no_longer_needed_cb_ = no_longer_needed_cb;
  return wrapped_frame;
}

// static
scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
  return new VideoFrame(VideoFrame::UNKNOWN,
                        gfx::Size(),
                        gfx::Rect(),
                        gfx::Size(),
                        scoped_ptr<gpu::MailboxHolder>(),
                        kNoTimestamp(),
                        true);
}

// static
scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
    const gfx::Size& size,
    uint8 y,
    uint8 u,
    uint8 v,
    base::TimeDelta timestamp) {
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
  FillYUV(frame.get(), y, u, v);
  return frame;
}

// static
scoped_refptr<VideoFrame> VideoFrame::CreateBlackFrame(const gfx::Size& size) {
  const uint8 kBlackY = 0x00;
  const uint8 kBlackUV = 0x80;
  const base::TimeDelta kZero;
  return CreateColorFrame(size, kBlackY, kBlackUV, kBlackUV, kZero);
}

// static
scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame(
    const gfx::Size& size) {
  const uint8 kBlackY = 0x00;
  const uint8 kBlackUV = 0x00;
  const uint8 kTransparentA = 0x00;
  const base::TimeDelta kZero;
  scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
      VideoFrame::YV12A, size, gfx::Rect(size), size, kZero);
  FillYUVA(frame.get(), kBlackY, kBlackUV, kBlackUV, kTransparentA);
  return frame;
}

#if defined(VIDEO_HOLE)
// This block and other blocks wrapped around #if defined(VIDEO_HOLE) are not
// maintained by the general compositor team. Please contact the following
// people instead:
//
// wonsik@chromium.org
// ycheo@chromium.org

// static
scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(const gfx::Size& size) {
  DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
  scoped_refptr<VideoFrame> frame(
      new VideoFrame(VideoFrame::HOLE,
                     size,
                     gfx::Rect(size),
                     size,
                     scoped_ptr<gpu::MailboxHolder>(),
                     base::TimeDelta(),
                     false));
  return frame;
}
#endif  // defined(VIDEO_HOLE)

// static
size_t VideoFrame::NumPlanes(Format format) {
  switch (format) {
    case VideoFrame::NATIVE_TEXTURE:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
      return 0;
    case VideoFrame::NV12:
      return 2;
    case VideoFrame::YV12:
    case VideoFrame::YV16:
    case VideoFrame::I420:
    case VideoFrame::YV12J:
    case VideoFrame::YV24:
      return 3;
    case VideoFrame::YV12A:
      return 4;
    case VideoFrame::UNKNOWN:
      break;
  }
  NOTREACHED() << "Unsupported video frame format: " << format;
  return 0;
}

// static
size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
  size_t total = 0;
  for (size_t i = 0; i < NumPlanes(format); ++i)
    total += PlaneAllocationSize(format, i, coded_size);
  return total;
}
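// Worked example (illustrative): for YV12 at a coded size of 16x16,
// PlaneSize() below returns 16x16 for Y and 8x8 for U and V, so
// AllocationSize() is 16*16 + 8*8 + 8*8 = 384 bytes, i.e. 1.5 bytes per
// pixel as expected for 4:2:0 sampling with one byte per sample.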
// static
gfx::Size VideoFrame::PlaneSize(Format format,
                                size_t plane,
                                const gfx::Size& coded_size) {
  // Align to multiple-of-two size overall. This ensures that non-subsampled
  // planes can be addressed by pixel with the same scaling as the subsampled
  // planes.
  const int width = RoundUp(coded_size.width(), 2);
  const int height = RoundUp(coded_size.height(), 2);
  switch (format) {
    case VideoFrame::YV24:
      switch (plane) {
        case VideoFrame::kYPlane:
        case VideoFrame::kUPlane:
        case VideoFrame::kVPlane:
          return gfx::Size(width, height);
        default:
          break;
      }
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV12J:
    case VideoFrame::I420:
      switch (plane) {
        case VideoFrame::kYPlane:
          return gfx::Size(width, height);
        case VideoFrame::kUPlane:
        case VideoFrame::kVPlane:
          return gfx::Size(width / 2, height / 2);
        default:
          break;
      }
      break;
    case VideoFrame::YV12A:
      switch (plane) {
        case VideoFrame::kYPlane:
        case VideoFrame::kAPlane:
          return gfx::Size(width, height);
        case VideoFrame::kUPlane:
        case VideoFrame::kVPlane:
          return gfx::Size(width / 2, height / 2);
        default:
          break;
      }
      break;
    case VideoFrame::YV16:
      switch (plane) {
        case VideoFrame::kYPlane:
          return gfx::Size(width, height);
        case VideoFrame::kUPlane:
        case VideoFrame::kVPlane:
          return gfx::Size(width / 2, height);
        default:
          break;
      }
      break;
    case VideoFrame::NV12:
      switch (plane) {
        case VideoFrame::kYPlane:
          return gfx::Size(width, height);
        case VideoFrame::kUVPlane:
          return gfx::Size(width, height / 2);
        default:
          break;
      }
      break;
    case VideoFrame::UNKNOWN:
    case VideoFrame::NATIVE_TEXTURE:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
      break;
  }
  NOTREACHED() << "Unsupported video frame format/plane: " << format << "/"
               << plane;
  return gfx::Size();
}

size_t VideoFrame::PlaneAllocationSize(Format format,
                                       size_t plane,
                                       const gfx::Size& coded_size) {
  // VideoFrame formats are (so far) all YUV and 1 byte per sample.
  return PlaneSize(format, plane, coded_size).GetArea();
}

// static
int VideoFrame::PlaneHorizontalBitsPerPixel(Format format, size_t plane) {
  switch (format) {
    case VideoFrame::YV24:
      switch (plane) {
        case kYPlane:
        case kUPlane:
        case kVPlane:
          return 8;
        default:
          break;
      }
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV16:
    case VideoFrame::I420:
    case VideoFrame::YV12J:
      switch (plane) {
        case kYPlane:
          return 8;
        case kUPlane:
        case kVPlane:
          return 2;
        default:
          break;
      }
      break;
    case VideoFrame::YV12A:
      switch (plane) {
        case kYPlane:
        case kAPlane:
          return 8;
        case kUPlane:
        case kVPlane:
          return 2;
        default:
          break;
      }
      break;
    case VideoFrame::NV12:
      switch (plane) {
        case kYPlane:
          return 8;
        case kUVPlane:
          return 4;
        default:
          break;
      }
      break;
    case VideoFrame::UNKNOWN:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::NATIVE_TEXTURE:
      break;
  }
  NOTREACHED() << "Unsupported video frame format/plane: " << format << "/"
               << plane;
  return 0;
}
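// Worked example (illustrative): for a YV12 U plane the function above
// returns 2, since each U byte (8 bits) covers a 2x2 block of 4 pixels.
// Likewise, PlaneSize() rounds odd dimensions up before subsampling, so for
// YV12 at 15x15 the Y plane is 16x16 and the U and V planes are each 8x8.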
// Release data allocated by AllocateYUV().
static void ReleaseData(uint8* data) {
  DCHECK(data);
  base::AlignedFree(data);
}

void VideoFrame::AllocateYUV() {
  DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16 ||
         format_ == VideoFrame::YV12A || format_ == VideoFrame::I420 ||
         format_ == VideoFrame::YV12J || format_ == VideoFrame::YV24);
  // Align Y rows at least at 16 byte boundaries. The stride for both
  // YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes
  // for U and V applies to two rows of Y (one byte of UV for 4 bytes of Y),
  // so in the case of YV12 the strides are identical for the same width
  // surface, but the number of bytes allocated for YV12 is 1/2 the amount
  // for U & V as YV16. We also round the height of the surface allocated to
  // be an even number to avoid any potential of faulting by code that
  // attempts to access the Y values of the final row, but assumes that the
  // last row of U & V applies to a full two rows of Y. YV12A is the same as
  // YV12, but with an additional alpha plane that has the same size and
  // alignment as the Y plane.
  size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
                            kFrameSizeAlignment);
  size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
                             kFrameSizeAlignment);

  // The *2 here is because some formats (e.g. h264) allow interlaced coding,
  // and then the size needs to be a multiple of two macroblocks (vertically).
  // See libavcodec/utils.c:avcodec_align_dimensions2().
  size_t y_height = RoundUp(coded_size_.height(), kFrameSizeAlignment * 2);
  size_t uv_height =
      (format_ == VideoFrame::YV12 || format_ == VideoFrame::YV12A ||
       format_ == VideoFrame::I420)
          ? y_height / 2
          : y_height;
  size_t y_bytes = y_height * y_stride;
  size_t uv_bytes = uv_height * uv_stride;
  size_t a_bytes = format_ == VideoFrame::YV12A ? y_bytes : 0;

  // The extra line of UV being allocated is because h264 chroma MC
  // overreads by one line in some cases, see libavcodec/utils.c:
  // avcodec_align_dimensions2() and libavcodec/x86/h264_chromamc.asm:
  // put_h264_chroma_mc4_ssse3().
  const size_t data_size =
      y_bytes + (uv_bytes * 2 + uv_stride) + a_bytes + kFrameSizePadding;
  uint8* data = reinterpret_cast<uint8*>(
      base::AlignedAlloc(data_size, kFrameAddressAlignment));
  // FFmpeg expects the initial allocation to be zero-initialized. Failure
  // to do so can lead to uninitialized value usage. See
  // http://crbug.com/390941
  memset(data, 0, data_size);
  no_longer_needed_cb_ = base::Bind(&ReleaseData, data);
  COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
  data_[VideoFrame::kYPlane] = data;
  data_[VideoFrame::kUPlane] = data + y_bytes;
  data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
  strides_[VideoFrame::kYPlane] = y_stride;
  strides_[VideoFrame::kUPlane] = uv_stride;
  strides_[VideoFrame::kVPlane] = uv_stride;
  if (format_ == YV12A) {
    data_[VideoFrame::kAPlane] = data + y_bytes + (2 * uv_bytes);
    strides_[VideoFrame::kAPlane] = y_stride;
  }
}
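// Layout sketch (illustrative) of the single allocation made by
// AllocateYUV() above:
//
//   [ Y: y_bytes ][ U: uv_bytes ][ V: uv_bytes ][ A: a_bytes, YV12A only ]
//
// followed by the extra UV line and kFrameSizePadding that absorb the h264
// chroma over-read described above.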
VideoFrame::VideoFrame(VideoFrame::Format format,
                       const gfx::Size& coded_size,
                       const gfx::Rect& visible_rect,
                       const gfx::Size& natural_size,
                       scoped_ptr<gpu::MailboxHolder> mailbox_holder,
                       base::TimeDelta timestamp,
                       bool end_of_stream)
    : format_(format),
      coded_size_(coded_size),
      visible_rect_(visible_rect),
      natural_size_(natural_size),
      mailbox_holder_(mailbox_holder.Pass()),
      shared_memory_handle_(base::SharedMemory::NULLHandle()),
      timestamp_(timestamp),
      release_sync_point_(0),
      end_of_stream_(end_of_stream) {
  DCHECK(IsValidConfig(format_, coded_size_, visible_rect_, natural_size_));

  memset(&strides_, 0, sizeof(strides_));
  memset(&data_, 0, sizeof(data_));
}

VideoFrame::~VideoFrame() {
  if (!mailbox_holder_release_cb_.is_null()) {
    uint32 release_sync_point;
    {
      // To ensure that changes to |release_sync_point_| are visible on this
      // thread (imply a memory barrier).
      base::AutoLock locker(release_sync_point_lock_);
      release_sync_point = release_sync_point_;
    }
    base::ResetAndReturn(&mailbox_holder_release_cb_).Run(release_sync_point);
  }
  if (!no_longer_needed_cb_.is_null())
    base::ResetAndReturn(&no_longer_needed_cb_).Run();
}

// static
bool VideoFrame::IsValidPlane(size_t plane, VideoFrame::Format format) {
  return (plane < NumPlanes(format));
}

int VideoFrame::stride(size_t plane) const {
  DCHECK(IsValidPlane(plane, format_));
  return strides_[plane];
}

// static
size_t VideoFrame::RowBytes(size_t plane,
                            VideoFrame::Format format,
                            int width) {
  DCHECK(IsValidPlane(plane, format));
  switch (format) {
    case VideoFrame::YV24:
      switch (plane) {
        case kYPlane:
        case kUPlane:
        case kVPlane:
          return width;
        default:
          break;
      }
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV16:
    case VideoFrame::I420:
    case VideoFrame::YV12J:
      switch (plane) {
        case kYPlane:
          return width;
        case kUPlane:
        case kVPlane:
          return RoundUp(width, 2) / 2;
        default:
          break;
      }
      break;
    case VideoFrame::YV12A:
      switch (plane) {
        case kYPlane:
        case kAPlane:
          return width;
        case kUPlane:
        case kVPlane:
          return RoundUp(width, 2) / 2;
        default:
          break;
      }
      break;
    case VideoFrame::NV12:
      switch (plane) {
        case kYPlane:
        case kUVPlane:
          return width;
        default:
          break;
      }
      break;
    case VideoFrame::UNKNOWN:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::NATIVE_TEXTURE:
      break;
  }
  NOTREACHED() << "Unsupported video frame format/plane: " << format << "/"
               << plane;
  return 0;
}

int VideoFrame::row_bytes(size_t plane) const {
  return RowBytes(plane, format_, coded_size_.width());
}

// static
size_t VideoFrame::Rows(size_t plane, VideoFrame::Format format, int height) {
  DCHECK(IsValidPlane(plane, format));
  switch (format) {
    case VideoFrame::YV24:
    case VideoFrame::YV16:
      switch (plane) {
        case kYPlane:
        case kUPlane:
        case kVPlane:
          return height;
        default:
          break;
      }
      break;
    case VideoFrame::YV12:
    case VideoFrame::YV12J:
    case VideoFrame::I420:
      switch (plane) {
        case kYPlane:
          return height;
        case kUPlane:
        case kVPlane:
          return RoundUp(height, 2) / 2;
        default:
          break;
      }
      break;
    case VideoFrame::YV12A:
      switch (plane) {
        case kYPlane:
        case kAPlane:
          return height;
        case kUPlane:
        case kVPlane:
          return RoundUp(height, 2) / 2;
        default:
          break;
      }
      break;
    case VideoFrame::NV12:
      switch (plane) {
        case kYPlane:
          return height;
        case kUVPlane:
          return RoundUp(height, 2) / 2;
        default:
          break;
      }
      break;
    case VideoFrame::UNKNOWN:
#if defined(VIDEO_HOLE)
    case VideoFrame::HOLE:
#endif  // defined(VIDEO_HOLE)
    case VideoFrame::NATIVE_TEXTURE:
      break;
  }
  NOTREACHED() << "Unsupported video frame format/plane: " << format << "/"
               << plane;
  return 0;
}

int VideoFrame::rows(size_t plane) const {
  return Rows(plane, format_, coded_size_.height());
}

uint8* VideoFrame::data(size_t plane) const {
  DCHECK(IsValidPlane(plane, format_));
  return data_[plane];
}

const gpu::MailboxHolder* VideoFrame::mailbox_holder() const {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  return mailbox_holder_.get();
}

base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
  return shared_memory_handle_;
}
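// Worked example (illustrative): RowBytes() and Rows() above round odd
// visible dimensions up before halving, so for YV12 with width 7 and height
// 5, RowBytes(kUPlane) is RoundUp(7, 2) / 2 = 4 and Rows(kUPlane) is
// RoundUp(5, 2) / 2 = 3.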
void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
  DCHECK_EQ(format_, NATIVE_TEXTURE);
  base::AutoLock locker(release_sync_point_lock_);
  // Must wait on the previous sync point before inserting a new sync point
  // so that |mailbox_holder_release_cb_| guarantees the previous sync point
  // occurred when it waits on |release_sync_point_|.
  if (release_sync_point_)
    client->WaitSyncPoint(release_sync_point_);
  release_sync_point_ = client->InsertSyncPoint();
}

#if defined(OS_POSIX)
int VideoFrame::dmabuf_fd(size_t plane) const {
  return dmabuf_fds_[plane].get();
}
#endif

#if defined(OS_MACOSX)
CVPixelBufferRef VideoFrame::cv_pixel_buffer() const {
  return cv_pixel_buffer_.get();
}
#endif

void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
  for (int plane = 0; plane < kMaxPlanes; ++plane) {
    if (!IsValidPlane(plane, format_))
      break;
    for (int row = 0; row < rows(plane); ++row) {
      base::MD5Update(
          context,
          base::StringPiece(reinterpret_cast<char*>(data(plane) +
                                                    stride(plane) * row),
                            row_bytes(plane)));
    }
  }
}

}  // namespace media