
Searched refs:frame2 (Results 1 – 25 of 42) sorted by relevance

/external/webrtc/talk/media/base/
videoframe_unittest.h
495 const cricket::VideoFrame& frame2, in IsEqual() argument
498 frame2.GetWidth(), frame2.GetHeight(), in IsEqual()
499 frame2.GetPixelWidth(), frame2.GetPixelHeight(), in IsEqual()
500 frame2.GetTimeStamp(), in IsEqual()
501 frame2.GetYPlane(), frame2.GetYPitch(), in IsEqual()
502 frame2.GetUPlane(), frame2.GetUPitch(), in IsEqual()
503 frame2.GetVPlane(), frame2.GetVPitch(), in IsEqual()
508 const cricket::VideoFrame& frame2, in IsEqualWithCrop() argument
510 return frame1.GetWidth() <= frame2.GetWidth() && in IsEqualWithCrop()
511 frame1.GetHeight() <= frame2.GetHeight() && in IsEqualWithCrop()
[all …]
/external/webrtc/webrtc/common_video/
i420_video_frame_unittest.cc
37 VideoFrame frame2; in TEST() local
38 frame2.CopyFrame(frame); in TEST()
143 VideoFrame frame2; in TEST() local
144 frame2.ShallowCopy(frame1); in TEST()
148 const VideoFrame* const_frame2_ptr = &frame2; in TEST()
157 EXPECT_EQ(frame2.timestamp(), frame1.timestamp()); in TEST()
158 EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms()); in TEST()
159 EXPECT_EQ(frame2.render_time_ms(), frame1.render_time_ms()); in TEST()
160 EXPECT_EQ(frame2.rotation(), frame1.rotation()); in TEST()
162 frame2.set_timestamp(timestamp + 1); in TEST()
[all …]
/external/webrtc/webrtc/video/
video_capture_input_unittest.cc
42 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
43 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2);
44 bool EqualBufferFrames(const VideoFrame& frame1, const VideoFrame& frame2);
250 bool EqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { in EqualFrames() argument
251 if (frame1.native_handle() != NULL || frame2.native_handle() != NULL) in EqualFrames()
252 return EqualTextureFrames(frame1, frame2); in EqualFrames()
253 return EqualBufferFrames(frame1, frame2); in EqualFrames()
256 bool EqualTextureFrames(const VideoFrame& frame1, const VideoFrame& frame2) { in EqualTextureFrames() argument
257 return ((frame1.native_handle() == frame2.native_handle()) && in EqualTextureFrames()
258 (frame1.width() == frame2.width()) && in EqualTextureFrames()
[all …]
video_send_stream_tests.cc
47 void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2);
49 const VideoFrame& frame2);
51 const VideoFrame& frame2);
1191 void ExpectEqualFrames(const VideoFrame& frame1, const VideoFrame& frame2) { in ExpectEqualFrames() argument
1192 if (frame1.native_handle() != nullptr || frame2.native_handle() != nullptr) in ExpectEqualFrames()
1193 ExpectEqualTextureFrames(frame1, frame2); in ExpectEqualFrames()
1195 ExpectEqualBufferFrames(frame1, frame2); in ExpectEqualFrames()
1199 const VideoFrame& frame2) { in ExpectEqualTextureFrames() argument
1200 EXPECT_EQ(frame1.native_handle(), frame2.native_handle()); in ExpectEqualTextureFrames()
1201 EXPECT_EQ(frame1.width(), frame2.width()); in ExpectEqualTextureFrames()
[all …]
/external/opencv3/modules/video/perf/
perf_optflowpyrlk.cpp
57 Mat frame1, frame2; variable
62 cvtColor(img2, frame2, COLOR_BGR2GRAY, cn);
66 frame2 = img2;
70 cvtColor(img2, frame2, COLOR_BGR2BGRA, cn);
86 declare.in(frame1, frame2, inPoints).out(outPoints);
90 calcOpticalFlowPyrLK(frame1, frame2, inPoints, outPoints, status, err,
131 Mat frame1, frame2; variable
136 cvtColor(img2, frame2, COLOR_BGR2GRAY, cn);
140 frame2 = img2;
144 cvtColor(img2, frame2, COLOR_BGR2BGRA, cn);
[all …]
perf_tvl1optflow.cpp
19 Mat frame2 = imread(getDataPath(GetParam().second), IMREAD_GRAYSCALE); variable
21 ASSERT_FALSE(frame2.empty());
27 TEST_CYCLE_N(10) tvl1->calc(frame1, frame2, flow);
/external/google-breakpad/src/processor/
stackwalker_arm_unittest.cc
275 StackFrameARM *frame2 = static_cast<StackFrameARM *>(frames->at(2)); in TEST_F() local
276 EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); in TEST_F()
279 frame2->context_validity); in TEST_F()
280 EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]); in TEST_F()
281 EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM_REG_SP]); in TEST_F()
864 StackFrameARM *frame2 = static_cast<StackFrameARM *>(frames->at(2)); in TEST_F() local
865 EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust); in TEST_F()
870 frame2->context_validity); in TEST_F()
871 EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]); in TEST_F()
872 EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_LR]); in TEST_F()
[all …]
stackwalker_x86_unittest.cc
739 StackFrameX86 *frame2 = static_cast<StackFrameX86 *>(frames->at(2)); in TEST_F() local
740 EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust); in TEST_F()
745 frame2->context_validity); in TEST_F()
746 EXPECT_EQ(0x2a179e38U, frame2->instruction + 1); in TEST_F()
747 EXPECT_EQ(0x2a179e38U, frame2->context.eip); in TEST_F()
748 EXPECT_EQ(frame2_esp.Value(), frame2->context.esp); in TEST_F()
749 EXPECT_EQ(frame2_ebp.Value(), frame2->context.ebp); in TEST_F()
750 EXPECT_EQ(0x2558c7f3U, frame2->context.ebx); in TEST_F()
751 EXPECT_EQ(NULL, frame2->module); in TEST_F()
752 EXPECT_EQ(NULL, frame2->windows_frame_info); in TEST_F()
[all …]
stackwalker_arm64_unittest.cc
253 StackFrameARM64 *frame2 = static_cast<StackFrameARM64 *>(frames->at(2)); in TEST_F() local
254 EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); in TEST_F()
257 frame2->context_validity); in TEST_F()
258 EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]); in TEST_F()
259 EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM64_REG_SP]); in TEST_F()
524 StackFrameARM64 *frame2 = static_cast<StackFrameARM64 *>(frames->at(2)); in TEST_F() local
525 EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust); in TEST_F()
530 frame2->context_validity); in TEST_F()
531 EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]); in TEST_F()
532 EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM64_REG_LR]); in TEST_F()
[all …]
stackwalker_mips_unittest.cc
289 StackFrameMIPS* frame2 = static_cast<StackFrameMIPS*>(frames->at(2)); in TEST_F() local
290 EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); in TEST_F()
295 frame2->context_validity); in TEST_F()
296 EXPECT_EQ(return_address2 - 2 * sizeof(return_address2), frame2->context.epc); in TEST_F()
297 EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_MIPS_REG_SP]); in TEST_F()
stackwalker_amd64_unittest.cc
296 StackFrameAMD64 *frame2 = static_cast<StackFrameAMD64 *>(frames->at(2)); in TEST_F() local
297 EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust); in TEST_F()
300 frame2->context_validity); in TEST_F()
301 EXPECT_EQ(return_address2, frame2->context.rip); in TEST_F()
302 EXPECT_EQ(frame2_sp.Value(), frame2->context.rsp); in TEST_F()
/external/valgrind/memcheck/tests/
xml1.c
37 int frame2 ( void ) in frame2() function
44 return frame2() + 1; in frame1()
/external/webrtc/talk/media/webrtc/
webrtcvideoframe_unittest.cc
318 cricket::VideoFrame* frame2 = frame1.Copy(); in TEST_F() local
319 EXPECT_EQ(frame1.GetNativeHandle(), frame2->GetNativeHandle()); in TEST_F()
320 EXPECT_EQ(frame1.GetWidth(), frame2->GetWidth()); in TEST_F()
321 EXPECT_EQ(frame1.GetHeight(), frame2->GetHeight()); in TEST_F()
322 EXPECT_EQ(frame1.GetTimeStamp(), frame2->GetTimeStamp()); in TEST_F()
323 delete frame2; in TEST_F()
/external/libvpx/libvpx/vp8/encoder/mips/msa/
temporal_filter_msa.c
138 v16i8 frame2 = { 0 }; in temporal_filter_apply_8size_msa() local
171 INSERT_D2_SB(f2, f3, frame2); in temporal_filter_apply_8size_msa()
174 ILVRL_B2_UB(frame1, frame2, frame_l, frame_h); in temporal_filter_apply_8size_msa()
200 UNPCK_UB_SH(frame2, frame2_0_h, frame2_1_h); in temporal_filter_apply_8size_msa()
254 uint8_t *frame2, uint32_t block_size, in vp8_temporal_filter_apply_msa() argument
260 temporal_filter_apply_8size_msa(frame1, stride, frame2, strength, in vp8_temporal_filter_apply_msa()
265 temporal_filter_apply_16size_msa(frame1, stride, frame2, strength, in vp8_temporal_filter_apply_msa()
280 int pixel_value = *frame2++; in vp8_temporal_filter_apply_msa()
/external/webrtc/webrtc/modules/utility/source/
audio_frame_operations_unittest.cc
43 void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) { in VerifyFramesAreEqual() argument
44 EXPECT_EQ(frame1.num_channels_, frame2.num_channels_); in VerifyFramesAreEqual()
46 frame2.samples_per_channel_); in VerifyFramesAreEqual()
50 EXPECT_EQ(frame1.data_[i], frame2.data_[i]); in VerifyFramesAreEqual()
/external/opencv3/modules/video/test/
test_tvl1optflow.cpp
151 Mat frame2 = imread(frame2_path, IMREAD_GRAYSCALE); in TEST() local
153 ASSERT_FALSE(frame2.empty()); in TEST()
158 tvl1->calc(frame1, frame2, flow); in TEST()
/external/webrtc/webrtc/modules/video_capture/test/
video_capture_unittest.cc
66 const webrtc::VideoFrame& frame2) { in CompareFrames() argument
68 (frame1.stride(webrtc::kYPlane) == frame2.stride(webrtc::kYPlane)) && in CompareFrames()
69 (frame1.stride(webrtc::kUPlane) == frame2.stride(webrtc::kUPlane)) && in CompareFrames()
70 (frame1.stride(webrtc::kVPlane) == frame2.stride(webrtc::kVPlane)) && in CompareFrames()
71 (frame1.width() == frame2.width()) && in CompareFrames()
72 (frame1.height() == frame2.height()); in CompareFrames()
79 int allocated_size2 = frame2.allocated_size(plane_type); in CompareFrames()
83 const uint8_t* plane_buffer2 = frame2.buffer(plane_type); in CompareFrames()
/external/webrtc/webrtc/modules/video_processing/test/
video_processing_unittest.cc
54 const webrtc::VideoFrame& frame2);
381 const webrtc::VideoFrame& frame2) { argument
385 int allocated_size2 = frame2.allocated_size(plane_type);
389 const uint8_t* plane_buffer2 = frame2.buffer(plane_type);
/external/skia/tests/
PathOpsConicIntersectionTest.cpp
193 const SkDConic frame2[] = { variable
229 frame0, frame1, frame2, frame3, frame4, frame5, frame6
233 (int) SK_ARRAY_COUNT(frame2), (int) SK_ARRAY_COUNT(frame3),
/external/opencv3/doc/py_tutorials/py_video/py_lucas_kanade/
py_lucas_kanade.markdown
189 ret, frame2 = cap.read()
190 next = cv2.cvtColor(frame2,cv2.COLOR_BGR2GRAY)
199 cv2.imshow('frame2',bgr)
204 cv2.imwrite('opticalfb.png',frame2)
/external/libvpx/libvpx/vp9/common/
vp9_rtcd_defs.pl
320 …_temporal_filter_apply/, "uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int bloc…
346 …_temporal_filter_apply/, "uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned int bloc…
/external/libvpx/libvpx/vp9/encoder/
vp9_temporal_filter.c
124 uint8_t *frame2, in vp9_temporal_filter_apply_c() argument
139 int pixel_value = *frame2++; in vp9_temporal_filter_apply_c()
177 uint16_t *frame2 = CONVERT_TO_SHORTPTR(frame2_8); in vp9_highbd_temporal_filter_apply_c() local
186 int pixel_value = *frame2++; in vp9_highbd_temporal_filter_apply_c()
/external/libvpx/config/x86/
vp9_rtcd.h
122 void vp9_temporal_filter_apply_c(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned in…
123 void vp9_temporal_filter_apply_sse2(uint8_t *frame1, unsigned int stride, uint8_t *frame2, unsigned…
/external/libvpx/libvpx/vp8/encoder/x86/
temporal_filter_apply_sse2.asm
17 ; unsigned char *frame2, | 2
/external/libvpx/libvpx/vp9/encoder/x86/
vp9_temporal_filter_apply_sse2.asm
17 ; unsigned char *frame2, | 2