/external/webrtc/webrtc/common_video/libyuv/ |
D | webrtc_libyuv.cc |
      251  dst_frame->buffer(kUPlane), in ConvertToI420()
      252  dst_frame->stride(kUPlane), in ConvertToI420()
      268  src_frame.buffer(kUPlane), in ConvertFromI420()
      269  src_frame.stride(kUPlane), in ConvertFromI420()
      287  src_frame.buffer(kUPlane), in ConvertFromYV12()
      288  src_frame.stride(kUPlane), in ConvertFromYV12()
      306  ref_frame->buffer(kUPlane), in I420PSNR()
      307  ref_frame->stride(kUPlane), in I420PSNR()
      312  test_frame->buffer(kUPlane), in I420PSNR()
      313  test_frame->stride(kUPlane), in I420PSNR()
      [all …]
|
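All of these conversion and PSNR call sites address the chroma plane the same way: a base pointer from buffer(kUPlane) plus the per-plane stride(kUPlane), never an assumption about a packed layout. A minimal sketch of that pattern, assuming the webrtc::VideoFrame accessors visible above; the helper name is made up and the video_frame.h include path is an assumption:

```cpp
#include <cstring>

#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::kUPlane
#include "webrtc/video_frame.h"                               // webrtc::VideoFrame (path assumed)

// Copies the U plane of |src| into |dst| row by row, honoring each frame's own
// stride. I420 chroma is subsampled 2x2, so the plane is half-width/half-height.
void CopyUPlane(const webrtc::VideoFrame& src, webrtc::VideoFrame* dst) {
  const int chroma_width = (src.width() + 1) / 2;
  const int chroma_height = (src.height() + 1) / 2;
  const uint8_t* src_u = src.buffer(webrtc::kUPlane);
  uint8_t* dst_u = dst->buffer(webrtc::kUPlane);
  for (int row = 0; row < chroma_height; ++row) {
    std::memcpy(dst_u + row * dst->stride(webrtc::kUPlane),
                src_u + row * src.stride(webrtc::kUPlane),
                chroma_width);
  }
}
```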
D | scaler.cc |
      75  const uint8_t* u_ptr = src_frame.buffer(kUPlane) + in Scale()
      76  src_offset_y / 2 * src_frame.stride(kUPlane) + in Scale()
      85  src_frame.stride(kUPlane), in Scale()
      91  dst_frame->buffer(kUPlane), in Scale()
      92  dst_frame->stride(kUPlane), in Scale()
|
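Scaler::Scale() crops before scaling by offsetting the plane pointers; because the U plane is subsampled by two in each dimension, the luma-space offsets are halved. A sketch of that pointer arithmetic (the helper name is hypothetical):

```cpp
#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::kUPlane
#include "webrtc/video_frame.h"                               // path assumed

// Top-left of a crop region inside the U plane, given luma-space offsets.
const uint8_t* CroppedUPlane(const webrtc::VideoFrame& frame,
                             int offset_x, int offset_y) {
  return frame.buffer(webrtc::kUPlane) +
         (offset_y / 2) * frame.stride(webrtc::kUPlane) +
         (offset_x / 2);
}
```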
/external/webrtc/webrtc/common_video/ |
D | i420_video_frame_unittest.cc |
      60   int stride_u = frame.stride(kUPlane); in TEST()
      65   EXPECT_EQ(ExpectedSize(stride_u, height, kUPlane), in TEST()
      66   frame.allocated_size(kUPlane)); in TEST()
      111  memset(small_frame.buffer(kUPlane), 2, small_frame.allocated_size(kUPlane)); in TEST()
      152  EXPECT_TRUE(const_frame1_ptr->buffer(kUPlane) == in TEST()
      153  const_frame2_ptr->buffer(kUPlane)); in TEST()
      208  EXPECT_TRUE(EqualPlane(buffer_u, frame2.buffer(kUPlane), stride_uv, 8, 8)); in TEST()
      213  EXPECT_LE(kSizeUv, frame2.allocated_size(kUPlane)); in TEST()
      221  const uint8_t* u = frame.buffer(kUPlane); in TEST()
      225  EXPECT_EQ(u, frame.buffer(kUPlane)); in TEST()
      [all …]
|
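The test compares allocated_size(kUPlane) against an expected per-plane size derived from the stride and the frame height. For I420 the chroma planes carry half as many rows as the luma plane (rounded up); the sketch below states that relationship, with a made-up helper name that may not match the test's ExpectedSize() exactly:

```cpp
#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::PlaneType

// Minimum bytes a plane needs: stride times the number of rows in that plane.
int ExpectedPlaneSize(int stride, int frame_height, webrtc::PlaneType type) {
  const int rows =
      (type == webrtc::kYPlane) ? frame_height : (frame_height + 1) / 2;
  return stride * rows;
}
```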
D | video_frame.cc |
      82   stride_u == stride(kUPlane) && stride_v == stride(kVPlane)) { in CreateEmptyFrame()
      119  memcpy(buffer(kUPlane), buffer_u, expected_size_u); in CreateFrame()
      145  CreateFrame(videoFrame.buffer(kYPlane), videoFrame.buffer(kUPlane), in CopyFrame()
      148  videoFrame.stride(kUPlane), videoFrame.stride(kVPlane)); in CopyFrame()
      228  stride(kUPlane) != frame.stride(kUPlane) || in EqualsFrame()
      239  EqualPlane(buffer(kUPlane), frame.buffer(kUPlane), in EqualsFrame()
      240  stride(kUPlane), half_width, half_height) && in EqualsFrame()
|
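EqualsFrame() first rejects frames whose per-plane strides differ (line 228) and only then compares the planes with EqualPlane(), passing half the luma width and height for U and V. A self-contained sketch of such a plane comparison (not the real EqualPlane, just the same shape):

```cpp
#include <cstdint>
#include <cstring>

// Row-by-row compare of two planes sharing one stride; only |width| bytes per
// row are meaningful, padding between rows is ignored. A single stride is
// enough here because the caller has already checked that both frames agree.
bool PlanesEqual(const uint8_t* a, const uint8_t* b,
                 int stride, int width, int height) {
  for (int row = 0; row < height; ++row) {
    if (std::memcmp(a + row * stride, b + row * stride, width) != 0)
      return false;
  }
  return true;
}
```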
D | video_frame_buffer.cc |
      67   case kUPlane: in data()
      88   case kUPlane: in stride()
      174  case kUPlane: in data()
      188  case kUPlane: in stride()
      226  const uint8_t* u_plane = buffer->data(kUPlane) + in ShallowCenterCrop()
      227  buffer->stride(kUPlane) * uv_offset_y + uv_offset_x; in ShallowCenterCrop()
      233  u_plane, buffer->stride(kUPlane), in ShallowCenterCrop()
|
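data() and stride() in video_frame_buffer.cc select the per-plane value with a switch over PlaneType, and ShallowCenterCrop() builds its cropped U pointer with the same stride arithmetic as the scaler above. A sketch of the switch shape; the struct and its members are placeholders, not the real private fields:

```cpp
#include <cstdint>

#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::PlaneType

// Stand-in for the three plane pointers an I420 buffer keeps internally.
struct PlanePointers {
  const uint8_t* y;
  const uint8_t* u;
  const uint8_t* v;
};

// Dispatch on the requested plane, as data()/stride() do above.
const uint8_t* SelectPlane(const PlanePointers& planes, webrtc::PlaneType type) {
  switch (type) {
    case webrtc::kYPlane:
      return planes.y;
    case webrtc::kUPlane:
      return planes.u;
    case webrtc::kVPlane:
      return planes.v;
    default:
      return nullptr;
  }
}
```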
D | i420_buffer_pool_unittest.cc |
      25  const uint8_t* u_ptr = buffer->data(kUPlane); in TEST()
      32  EXPECT_EQ(u_ptr, buffer->data(kUPlane)); in TEST()
      42  const uint8_t* u_ptr = buffer->data(kUPlane); in TEST()
      50  EXPECT_NE(u_ptr, buffer->data(kUPlane)); in TEST()
|
/external/webrtc/talk/media/webrtc/ |
D | webrtcvideoframe.cc |
      37   using webrtc::kUPlane;
      104  return video_frame_buffer_ ? video_frame_buffer_->data(kUPlane) : nullptr; in GetUPlane()
      117  return video_frame_buffer_ ? video_frame_buffer_->MutableData(kUPlane) in GetUPlane()
      131  return video_frame_buffer_ ? video_frame_buffer_->stride(kUPlane) : 0; in GetUPitch()
      169  video_frame_buffer_->stride(kUPlane), in MakeExclusive()
      173  new_buffer->MutableData(kYPlane), new_buffer->MutableData(kUPlane), in MakeExclusive()
      175  new_buffer->stride(kUPlane), new_buffer->stride(kVPlane))) { in MakeExclusive()
|
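The plane getters shown above are thin, null-guarded forwards to the underlying webrtc::VideoFrameBuffer: data(kUPlane) for reads, MutableData(kUPlane) for writes, stride(kUPlane) for the pitch. A sketch of that wrapper shape; the class name, member name, and the scoped_ref_ptr.h include path are assumptions:

```cpp
#include "webrtc/base/scoped_ref_ptr.h"                      // rtc::scoped_refptr (path assumed)
#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::VideoFrameBuffer

// Null-guarded plane accessors over a ref-counted frame buffer.
class FrameView {
 public:
  explicit FrameView(rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer)
      : buffer_(buffer) {}

  const uint8_t* GetUPlane() const {
    return buffer_ ? buffer_->data(webrtc::kUPlane) : nullptr;
  }
  uint8_t* GetMutableUPlane() {
    return buffer_ ? buffer_->MutableData(webrtc::kUPlane) : nullptr;
  }
  int GetUPitch() const {
    return buffer_ ? buffer_->stride(webrtc::kUPlane) : 0;
  }

 private:
  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer_;
};
```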
/external/webrtc/webrtc/video/ |
D | video_capture_input_unittest.cc |
      267  (frame1.stride(kUPlane) == frame2.stride(kUPlane)) && in EqualBufferFrames()
      271  (frame1.allocated_size(kUPlane) == frame2.allocated_size(kUPlane)) && in EqualBufferFrames()
      275  (memcmp(frame1.buffer(kUPlane), frame2.buffer(kUPlane), in EqualBufferFrames()
      276  frame1.allocated_size(kUPlane)) == 0) && in EqualBufferFrames()
|
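EqualBufferFrames() only memcmp's the raw plane buffers after confirming that stride and allocated size agree, since a byte-for-byte compare of differently laid-out planes would be meaningless. The U-plane part of that check, sketched as a free function (the name is made up):

```cpp
#include <cstring>

#include "webrtc/common_video/include/video_frame_buffer.h"  // webrtc::kUPlane
#include "webrtc/video_frame.h"                               // path assumed

bool UPlanesIdentical(const webrtc::VideoFrame& a, const webrtc::VideoFrame& b) {
  return a.stride(webrtc::kUPlane) == b.stride(webrtc::kUPlane) &&
         a.allocated_size(webrtc::kUPlane) == b.allocated_size(webrtc::kUPlane) &&
         std::memcmp(a.buffer(webrtc::kUPlane), b.buffer(webrtc::kUPlane),
                     a.allocated_size(webrtc::kUPlane)) == 0;
}
```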
D | video_encoder_unittest.cc |
      112  memset(frame_.buffer(webrtc::kUPlane), 128, in EncodeFrame()
      113  frame_.allocated_size(webrtc::kUPlane)); in EncodeFrame()
|
/external/webrtc/webrtc/modules/video_processing/ |
D | video_denoiser.cc |
      64  int stride_u = frame.stride(kUPlane); in DenoiseFrame()
      71  denoised_frame->CreateFrame(frame.buffer(kYPlane), frame.buffer(kUPlane), in DenoiseFrame()
      86  uint8_t* u_dst = denoised_frame->buffer(kUPlane); in DenoiseFrame()
      89  const uint8_t* u_src = frame.buffer(kUPlane); in DenoiseFrame()
|
/external/webrtc/webrtc/test/ |
D | frame_generator.cc |
      43   memset(frame_.buffer(kUPlane), u, frame_.allocated_size(kUPlane)); in NextFrame()
      206  int offset_u = (current_source_frame_->stride(PlaneType::kUPlane) * in CropSourceToScrolledImage()
      215  &current_source_frame_->buffer(PlaneType::kUPlane)[offset_u], in CropSourceToScrolledImage()
      219  current_source_frame_->stride(PlaneType::kUPlane), in CropSourceToScrolledImage()
|
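The frame generator paints solid-color frames with one memset per plane over allocated_size(), which also covers any stride padding (harmless for test content), while the scrolling generator offsets into the source planes much like the scaler crop above. A sketch of the solid fill (helper name hypothetical):

```cpp
#include <cstring>

#include "webrtc/common_video/include/video_frame_buffer.h"  // plane enumerators
#include "webrtc/video_frame.h"                               // path assumed

// Fill every plane of |frame| with a constant value.
void FillSolid(webrtc::VideoFrame* frame, uint8_t y, uint8_t u, uint8_t v) {
  std::memset(frame->buffer(webrtc::kYPlane), y,
              frame->allocated_size(webrtc::kYPlane));
  std::memset(frame->buffer(webrtc::kUPlane), u,
              frame->allocated_size(webrtc::kUPlane));
  std::memset(frame->buffer(webrtc::kVPlane), v,
              frame->allocated_size(webrtc::kVPlane));
}
```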
D | frame_generator_unittest.cc |
      66  ASSERT_EQ(uv_size, frame->allocated_size(PlaneType::kUPlane)); in CheckFrameAndMutate()
      67  buffer = frame->buffer(PlaneType::kUPlane); in CheckFrameAndMutate()
|
D | fake_texture_frame.h | 46 memset(buffer->MutableData(kUPlane), 0, half_height * half_width); in NativeToI420Buffer()
|
/external/webrtc/webrtc/modules/video_coding/codecs/vp8/ |
D | simulcast_unittest.h |
      129  EXPECT_NEAR(kColorU, decoded_image.buffer(kUPlane)[i], 4); in Decoded()
      315  memset(input_frame_.buffer(kUPlane), 0, in SetUpCodec()
      316  input_frame_.allocated_size(kUPlane)); in SetUpCodec()
      565  memset(input_frame_.buffer(kUPlane), 0, in SwitchingToOneStream()
      566  input_frame_.allocated_size(kUPlane)); in SwitchingToOneStream()
      606  memset(input_frame_.buffer(kUPlane), 0, in SwitchingToOneStream()
      607  input_frame_.allocated_size(kUPlane)); in SwitchingToOneStream()
      695  plane_offset[kUPlane] = kColorU; in TestRPSIEncodeDecode()
      711  plane_offset[kUPlane] += 1; in TestRPSIEncodeDecode()
      719  plane_offset[kUPlane] += 1; in TestRPSIEncodeDecode()
      [all …]
|
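The simulcast tests keep one fill value per plane in a plane_offset array indexed by the plane enumerators, memset each plane with its value, and bump the values between frames so consecutive encoder inputs are not identical. A sketch of that scheme; the helper name and the fixed-size array parameter are assumptions:

```cpp
#include <cstring>

#include "webrtc/common_video/include/video_frame_buffer.h"  // plane enumerators
#include "webrtc/video_frame.h"                               // path assumed

// |plane_offset| holds one fill value per plane, indexed by kYPlane/kUPlane/kVPlane.
void FillAndAdvance(webrtc::VideoFrame* frame, int plane_offset[3]) {
  for (int p = webrtc::kYPlane; p <= webrtc::kVPlane; ++p) {
    const webrtc::PlaneType plane = static_cast<webrtc::PlaneType>(p);
    std::memset(frame->buffer(plane), plane_offset[p],
                frame->allocated_size(plane));
    plane_offset[p] += 1;  // Next frame gets a slightly different color.
  }
}
```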
D | simulcast_encoder_adapter.cc |
      295  input_image.buffer(kUPlane), input_image.stride(kUPlane), in Encode()
      298  dst_frame.buffer(kUPlane), dst_frame.stride(kUPlane), in Encode()
|
/external/webrtc/webrtc/modules/video_coding/codecs/h264/ |
D | h264_video_toolbox_decoder.cc | 68 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), in VideoFrameBufferForPixelBuffer()
|
D | h264_video_toolbox_encoder.cc | 144 frame.buffer(webrtc::kUPlane), frame.stride(webrtc::kUPlane), in CopyVideoFrameToPixelBuffer()
|
/external/webrtc/talk/app/webrtc/java/jni/ |
D | androidvideocapturer_jni.cc | 167 buffer->MutableData(webrtc::kUPlane), buffer->stride(webrtc::kUPlane), in OnMemoryBufferFrame()
|
D | androidmediadecoder_jni.cc |
      703  frame_buffer->MutableData(webrtc::kUPlane), in DeliverPendingOutputs()
      704  frame_buffer->stride(webrtc::kUPlane), in DeliverPendingOutputs()
      717  frame_buffer->MutableData(webrtc::kUPlane), in DeliverPendingOutputs()
      718  frame_buffer->stride(webrtc::kUPlane), in DeliverPendingOutputs()
|
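The Android decoder path writes decoded pixel data into a freshly allocated I420 buffer through MutableData() plus the buffer's own stride (readers use data()). A minimal sketch of the U-plane copy under that model, assuming the decoder exposes its own U pointer and stride; in the real code the copy is done by a conversion routine rather than a hand-rolled loop:

```cpp
#include <cstring>

#include "webrtc/base/scoped_ref_ptr.h"                      // path assumed
#include "webrtc/common_video/include/video_frame_buffer.h"

void CopyDecodedUPlane(const uint8_t* src_u,
                       int src_stride_u,
                       const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& dst) {
  uint8_t* dst_u = dst->MutableData(webrtc::kUPlane);
  const int rows = (dst->height() + 1) / 2;  // chroma rows
  const int cols = (dst->width() + 1) / 2;   // chroma bytes per row
  for (int row = 0; row < rows; ++row) {
    std::memcpy(dst_u + row * dst->stride(webrtc::kUPlane),
                src_u + row * src_stride_u, cols);
  }
}
```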
/external/webrtc/webrtc/modules/video_render/android/ |
D | video_render_opengles20.cc |
      386  GlTexSubImage2D(width / 2, height / 2, frameToRender.stride(kUPlane), in UpdateTextures()
      387  frameToRender.buffer(kUPlane)); in UpdateTextures()
|
/external/webrtc/webrtc/modules/video_capture/test/ |
D | video_capture_unittest.cc |
      69   (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) && in CompareFrames()
      458  memset(test_frame_.buffer(webrtc::kUPlane), 127, in SetUp()
|
/external/webrtc/webrtc/common_video/include/ |
D | video_frame_buffer.h | 24 kUPlane = 1, enumerator
|
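kUPlane = 1 is the enumerator every call site above indexes with. For orientation, the surrounding values follow the conventional Y/U/V ordering; the block below is a reconstruction of the enum's shape, not a verified quote of the header:

```cpp
// Plane indices used with buffer()/stride()/data()/MutableData() throughout
// this listing (values other than kUPlane are assumed).
enum PlaneType {
  kYPlane = 0,
  kUPlane = 1,
  kVPlane = 2,
  kNumOfPlanes = 3,
};
```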
/external/webrtc/webrtc/modules/video_processing/test/ |
D | video_processing_unittest.cc | 76 memset(video_frame_.buffer(kUPlane), 0, video_frame_.allocated_size(kUPlane)); in SetUp()
|
/external/webrtc/webrtc/modules/video_render/test/testAPI/ |
D | testAPI.cc | 280 memset(frame->buffer(kUPlane), color, frame->allocated_size(kUPlane)); in GetTestVideoFrame()
|
/external/webrtc/webrtc/modules/video_render/ios/ |
D | open_gles20.mm | 324 width / 2, height / 2, frame.stride(kUPlane), frame.buffer(kUPlane));
|