/external/webrtc/api/audio/test/

audio_frame_unittest.cc
    109  AudioFrame frame2;  [in TEST(), local]
    112  frame2.UpdateFrame(kTimestamp, samples, kSamplesPerChannel, kSampleRateHz,  [in TEST()]
    115  frame1.CopyFrom(frame2);  [in TEST()]
    117  EXPECT_EQ(frame2.timestamp_, frame1.timestamp_);  [in TEST()]
    118  EXPECT_EQ(frame2.samples_per_channel_, frame1.samples_per_channel_);  [in TEST()]
    119  EXPECT_EQ(frame2.sample_rate_hz_, frame1.sample_rate_hz_);  [in TEST()]
    120  EXPECT_EQ(frame2.speech_type_, frame1.speech_type_);  [in TEST()]
    121  EXPECT_EQ(frame2.vad_activity_, frame1.vad_activity_);  [in TEST()]
    122  EXPECT_EQ(frame2.num_channels_, frame1.num_channels_);  [in TEST()]
    124  EXPECT_EQ(frame2.muted(), frame1.muted());  [in TEST()]
    [all …]
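The assertions above compare the header fields that AudioFrame::CopyFrom() is expected to carry over. A minimal sketch of an equivalent helper is shown below; the helper name and include paths are assumptions, not part of the WebRTC test itself.

    #include "api/audio/audio_frame.h"  // assumed include path
    #include "test/gtest.h"             // assumed include path

    // Hypothetical helper mirroring the assertions above: compares the metadata
    // fields that AudioFrame::CopyFrom() should duplicate between frames.
    void ExpectSameMetadata(const webrtc::AudioFrame& a,
                            const webrtc::AudioFrame& b) {
      EXPECT_EQ(a.timestamp_, b.timestamp_);
      EXPECT_EQ(a.samples_per_channel_, b.samples_per_channel_);
      EXPECT_EQ(a.sample_rate_hz_, b.sample_rate_hz_);
      EXPECT_EQ(a.speech_type_, b.speech_type_);
      EXPECT_EQ(a.vad_activity_, b.vad_activity_);
      EXPECT_EQ(a.num_channels_, b.num_channels_);
      EXPECT_EQ(a.muted(), b.muted());
    }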
/external/python/cpython2/Demo/tkinter/ttk/

roundframe.py
     98  frame2 = ttk.Frame(style="RoundedFrame", padding=10)  [variable]
     99  frame2.pack(fill='both', expand=1)
    106  text = Tkinter.Text(frame2, borderwidth=0, bg="white", highlightthickness=0)
    108  text.bind("<FocusIn>", lambda evt: frame2.state(["focus"]))
    109  text.bind("<FocusOut>", lambda evt: frame2.state(["!focus"]))
/external/tensorflow/tensorflow/python/util/

tf_stack_test.py
     45  frame1, frame2 = tf_stack.extract_stack(), tf_stack.extract_stack()
     46  self.assertEqual(len(frame1), len(frame2))
     47  for f1, f2 in zip(frame1, frame2):
     51  self.assertEqual(frame1, frame2)
     52  self.assertEqual(hash(tuple(frame1)), hash(tuple(frame2)))
/external/webrtc/modules/audio_mixer/

frame_combiner_unittest.cc
     59  AudioFrame frame2;  [variable]
     63  for (auto* frame : {&frame1, &frame2}) {  [in SetUpFrames()]
     76  const std::vector<AudioFrame*> all_frames = {&frame1, &frame2};  [in TEST()]
    100  const std::vector<AudioFrame*> all_frames = {&frame1, &frame2};  [in TEST()]
    129  const std::vector<AudioFrame*> all_frames = {&frame1, &frame2};  [in TEST()]
    156  const std::vector<AudioFrame*> all_frames = {&frame1, &frame2};  [in TEST()]
    247  AudioFrameOperations::Mute(&frame2);  [in TEST()]
    251  frames_to_combine.push_back(&frame2);  [in TEST()]
/external/google-breakpad/src/processor/

stackwalker_arm_unittest.cc
    276  StackFrameARM *frame2 = static_cast<StackFrameARM *>(frames->at(2));  [in TEST_F(), local]
    277  EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust);  [in TEST_F()]
    280  frame2->context_validity);  [in TEST_F()]
    281  EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]);  [in TEST_F()]
    282  EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM_REG_SP]);  [in TEST_F()]
    869  StackFrameARM *frame2 = static_cast<StackFrameARM *>(frames->at(2));  [in TEST_F(), local]
    870  EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust);  [in TEST_F()]
    875  frame2->context_validity);  [in TEST_F()]
    876  EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM_REG_PC]);  [in TEST_F()]
    877  EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM_REG_LR]);  [in TEST_F()]
    [all …]

stackwalker_x86_unittest.cc
    740  StackFrameX86 *frame2 = static_cast<StackFrameX86 *>(frames->at(2));  [in TEST_F(), local]
    741  EXPECT_EQ(StackFrame::FRAME_TRUST_CFI, frame2->trust);  [in TEST_F()]
    746  frame2->context_validity);  [in TEST_F()]
    747  EXPECT_EQ(0x2a179e38U, frame2->instruction + 1);  [in TEST_F()]
    748  EXPECT_EQ(0x2a179e38U, frame2->context.eip);  [in TEST_F()]
    749  EXPECT_EQ(frame2_esp.Value(), frame2->context.esp);  [in TEST_F()]
    750  EXPECT_EQ(frame2_ebp.Value(), frame2->context.ebp);  [in TEST_F()]
    751  EXPECT_EQ(0x2558c7f3U, frame2->context.ebx);  [in TEST_F()]
    752  EXPECT_EQ(NULL, frame2->module);  [in TEST_F()]
    753  EXPECT_EQ(NULL, frame2->windows_frame_info);  [in TEST_F()]
    [all …]

stackwalker_arm64_unittest.cc
    254  StackFrameARM64 *frame2 = static_cast<StackFrameARM64 *>(frames->at(2));  [in TEST_F(), local]
    255  EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust);  [in TEST_F()]
    258  frame2->context_validity);  [in TEST_F()]
    259  EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]);  [in TEST_F()]
    260  EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_ARM64_REG_SP]);  [in TEST_F()]
    525  StackFrameARM64 *frame2 = static_cast<StackFrameARM64 *>(frames->at(2));  [in TEST_F(), local]
    526  EXPECT_EQ(StackFrame::FRAME_TRUST_FP, frame2->trust);  [in TEST_F()]
    531  frame2->context_validity);  [in TEST_F()]
    532  EXPECT_EQ(return_address2, frame2->context.iregs[MD_CONTEXT_ARM64_REG_PC]);  [in TEST_F()]
    533  EXPECT_EQ(0U, frame2->context.iregs[MD_CONTEXT_ARM64_REG_LR]);  [in TEST_F()]
    [all …]

stackwalker_mips64_unittest.cc
    298  StackFrameMIPS* frame2 = static_cast<StackFrameMIPS*>(frames->at(2));  [in TEST_F(), local]
    299  EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust);  [in TEST_F()]
    304  frame2->context_validity);  [in TEST_F()]
    305  EXPECT_EQ(return_address2 - 2 * sizeof(return_address2), frame2->context.epc);  [in TEST_F()]
    306  EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_MIPS_REG_SP]);  [in TEST_F()]

stackwalker_mips_unittest.cc
    293  StackFrameMIPS* frame2 = static_cast<StackFrameMIPS*>(frames->at(2));  [in TEST_F(), local]
    294  EXPECT_EQ(StackFrame::FRAME_TRUST_SCAN, frame2->trust);  [in TEST_F()]
    299  frame2->context_validity);  [in TEST_F()]
    300  EXPECT_EQ(return_address2 - 2 * sizeof(return_address2), frame2->context.epc);  [in TEST_F()]
    301  EXPECT_EQ(frame2_sp.Value(), frame2->context.iregs[MD_CONTEXT_MIPS_REG_SP]);  [in TEST_F()]
/external/webrtc/common_video/

video_frame_unittest.cc
    321  VideoFrame frame2(frame1);  [in TEST(), local]
    323  EXPECT_EQ(frame1.video_frame_buffer(), frame2.video_frame_buffer());  [in TEST()]
    327  frame2.video_frame_buffer()->GetI420();  [in TEST()]
    332  EXPECT_EQ(frame2.timestamp(), frame1.timestamp());  [in TEST()]
    333  EXPECT_EQ(frame2.ntp_time_ms(), frame1.ntp_time_ms());  [in TEST()]
    334  EXPECT_EQ(frame2.timestamp_us(), frame1.timestamp_us());  [in TEST()]
    335  EXPECT_EQ(frame2.rotation(), frame1.rotation());  [in TEST()]
    337  frame2.set_timestamp(timestamp + 1);  [in TEST()]
    338  frame2.set_ntp_time_ms(ntp_time_ms + 1);  [in TEST()]
    339  frame2.set_timestamp_us(timestamp_us + 1);  [in TEST()]
    [all …]
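As the test above suggests, copying a VideoFrame is shallow with respect to the pixel data (both frames reference the same buffer), while the metadata is copied by value and can then diverge through the setters. A rough sketch of that check follows; the include paths and the helper name are assumptions.

    #include "api/video/video_frame.h"  // assumed include path
    #include "test/gtest.h"             // assumed include path

    // Hypothetical helper: a copied frame shares the ref-counted pixel buffer
    // with the original and starts out with identical metadata.
    void ExpectShallowCopy(const webrtc::VideoFrame& original) {
      webrtc::VideoFrame copy(original);
      EXPECT_EQ(original.video_frame_buffer(), copy.video_frame_buffer());
      EXPECT_EQ(original.timestamp(), copy.timestamp());
      EXPECT_EQ(original.ntp_time_ms(), copy.ntp_time_ms());
      EXPECT_EQ(original.timestamp_us(), copy.timestamp_us());
      EXPECT_EQ(original.rotation(), copy.rotation());
    }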
/external/libaom/libaom/av1/encoder/x86/

temporal_filter_avx2.c
     34  const uint8_t *frame1, const unsigned int stride, const uint8_t *frame2,  [in get_squared_error_16x16_avx2(), argument]
     39  const uint8_t *src2 = frame2;  [in get_squared_error_16x16_avx2()]
     62  const uint8_t *frame1, const unsigned int stride, const uint8_t *frame2,  [in get_squared_error_32x32_avx2(), argument]
     67  const uint8_t *src2 = frame2;  [in get_squared_error_32x32_avx2()]
    131  const uint8_t *frame1, const unsigned int stride, const uint8_t *frame2,  [in apply_temporal_filter(), argument]
    143  get_squared_error_32x32_avx2(frame1, stride, frame2, stride2, block_width,  [in apply_temporal_filter()]
    146  get_squared_error_16x16_avx2(frame1, stride, frame2, stride2, block_width,  [in apply_temporal_filter()]
    197  const int pixel_value = frame2[i * stride2 + j];  [in apply_temporal_filter()]

highbd_temporal_filter_avx2.c
     29  const uint16_t *frame1, const unsigned int stride, const uint16_t *frame2,  [in get_squared_error_16x16_avx2(), argument]
     34  const uint16_t *src2 = frame2;  [in get_squared_error_16x16_avx2()]
     60  const uint16_t *frame1, const unsigned int stride, const uint16_t *frame2,  [in get_squared_error_32x32_avx2(), argument]
     65  const uint16_t *src2 = frame2;  [in get_squared_error_32x32_avx2()]
    145  const uint16_t *frame1, const unsigned int stride, const uint16_t *frame2,  [in highbd_apply_temporal_filter(), argument]
    157  get_squared_error_32x32_avx2(frame1, stride, frame2, stride2, block_width,  [in highbd_apply_temporal_filter()]
    160  get_squared_error_16x16_avx2(frame1, stride, frame2, stride2, block_width,  [in highbd_apply_temporal_filter()]
    309  const int pixel_value = frame2[i * stride2 + j];  [in highbd_apply_temporal_filter()]

temporal_filter_sse2.c
     34  const uint8_t *frame2, const unsigned int stride2,  [in get_squared_error(), argument]
     39  const uint8_t *src2 = frame2;  [in get_squared_error()]
    106  const uint8_t *frame1, const unsigned int stride, const uint8_t *frame2,  [in apply_temporal_filter(), argument]
    117  get_squared_error(frame1, stride, frame2, stride2, block_width, block_height,  [in apply_temporal_filter()]
    174  const int pixel_value = frame2[i * stride2 + j];  [in apply_temporal_filter()]

highbd_temporal_filter_sse2.c
     34  const uint16_t *frame2,  [in get_squared_error(), argument]
     39  const uint16_t *src2 = frame2;  [in get_squared_error()]
     93  const uint16_t *frame1, const unsigned int stride, const uint16_t *frame2,  [in highbd_apply_temporal_filter(), argument]
    104  get_squared_error(frame1, stride, frame2, stride2, block_width, block_height,  [in highbd_apply_temporal_filter()]
    184  const int pixel_value = frame2[i * stride2 + j];  [in highbd_apply_temporal_filter()]
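The four SIMD files above all take two frame pointers with independent strides and begin by computing a squared error between them. For illustration only, a plain scalar version of that step (8-bit case) might look like the sketch below; the per-pixel output buffer and its stride are assumptions about the kernels' contract, not taken from libaom.

    #include <cstdint>

    // Hypothetical scalar reference for the SIMD kernels above. Assumption: for
    // every pixel of the block, compute the squared difference between the
    // co-located samples of frame1 and frame2 and store it in an output buffer
    // with its own stride.
    static void get_squared_error_ref(const uint8_t *frame1, unsigned int stride1,
                                      const uint8_t *frame2, unsigned int stride2,
                                      unsigned int block_width,
                                      unsigned int block_height,
                                      uint32_t *frame_sse,
                                      unsigned int sse_stride) {
      for (unsigned int i = 0; i < block_height; ++i) {
        for (unsigned int j = 0; j < block_width; ++j) {
          const int diff = frame1[i * stride1 + j] - frame2[i * stride2 + j];
          frame_sse[i * sse_stride + j] = static_cast<uint32_t>(diff * diff);
        }
      }
    }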
/external/deqp-deps/amber/tests/cases/

buffer_emd.amber
     45  BUFFER frame2 FORMAT B8G8R8A8_UNORM
     60  BIND BUFFER frame2 AS color LOCATION 0
     70  EXPECT frame1 EQ_HISTOGRAM_EMD_BUFFER frame2 TOLERANCE 0.1
/external/python/cpython3/Tools/pynche/

StripViewer.py
    350  frame2 = Frame(frame)
    351  frame2.pack(expand=YES, fill=BOTH)
    352  frame2.columnconfigure(0, weight=20)
    353  frame2.columnconfigure(2, weight=20)
    358  blackbtn = Button(frame2,
    364  uwdbtn = Checkbutton(frame2,
    368  hexbtn = Checkbutton(frame2,
    375  whitebtn = Button(frame2,
/external/python/cpython2/Tools/pynche/

StripViewer.py
    350  frame2 = Frame(frame)
    351  frame2.pack(expand=YES, fill=BOTH)
    352  frame2.columnconfigure(0, weight=20)
    353  frame2.columnconfigure(2, weight=20)
    358  blackbtn = Button(frame2,
    364  uwdbtn = Checkbutton(frame2,
    368  hexbtn = Checkbutton(frame2,
    391  whitebtn = Button(frame2,
/external/libvpx/libvpx/vp8/encoder/mips/msa/

temporal_filter_msa.c
    129  v16i8 frame2 = { 0 };  [in temporal_filter_apply_8size_msa(), local]
    161  INSERT_D2_SB(f2, f3, frame2);  [in temporal_filter_apply_8size_msa()]
    164  ILVRL_B2_UB(frame1, frame2, frame_l, frame_h);  [in temporal_filter_apply_8size_msa()]
    190  UNPCK_UB_SH(frame2, frame2_0_h, frame2_1_h);  [in temporal_filter_apply_8size_msa()]
    244  uint8_t *frame2, uint32_t block_size,  [in vp8_temporal_filter_apply_msa(), argument]
    248  temporal_filter_apply_8size_msa(frame1, stride, frame2, strength,  [in vp8_temporal_filter_apply_msa()]
    251  temporal_filter_apply_16size_msa(frame1, stride, frame2, strength,  [in vp8_temporal_filter_apply_msa()]
    262  int pixel_value = *frame2++;  [in vp8_temporal_filter_apply_msa()]
/external/deqp/modules/egl/

teglGetFrameTimestampsTests.cpp
    256  void verifyNeighboringFrames (const FrameTimes& frame1, const FrameTimes& frame2, tcu::ResultCollec…  [in verifyNeighboringFrames(), argument]
    259  …check_lt(result, frame1.swapBufferBeginNs, frame2.swapBufferBeginNs, "Swap begin times not monoton…  [in verifyNeighboringFrames()]
    260  check_lt(result, frame1.latch, frame2.latch, "Latch times not monotonic.");  [in verifyNeighboringFrames()]
    261  …check_lt(result, frame1.lastCompositionStart, frame2.latch, "Old buffer composited after new buffe…  [in verifyNeighboringFrames()]
    262  …check_lt(result, frame1.lastCompositionStart, frame2.firstCompositionStart, "Composition times ove…  [in verifyNeighboringFrames()]
    263  check_lt(result, frame1.dequeueReady, frame2.dequeueReady, "Dequeue ready times not monotonic.");  [in verifyNeighboringFrames()]
    266  …if (timestampValid(frame1.firstCompositionGpuFinished) && timestampValid(frame2.firstCompositionGp…  [in verifyNeighboringFrames()]
    267  …check_lt(result, frame1.firstCompositionGpuFinished, frame2.firstCompositionGpuFinished, "Composit…  [in verifyNeighboringFrames()]
    269  if (timestampValid(frame1.displayPresent) && timestampValid(frame2.displayPresent))  [in verifyNeighboringFrames()]
    270  …check_lt(result, frame1.displayPresent, frame2.displayPresent, "Display present times not monotoni…  [in verifyNeighboringFrames()]
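verifyNeighboringFrames() asserts that each timestamp of one frame strictly precedes the corresponding timestamp of the next frame. A standalone sketch of that pattern follows; the ResultCollector here is a stand-in, not dEQP's tcu::ResultCollector API.

    #include <cstdint>
    #include <iostream>

    // Stand-in for a result collector: records a message when a check fails.
    struct ResultCollector {
      int failures = 0;
      void fail(const char* msg) { ++failures; std::cerr << msg << "\n"; }
    };

    // Mirrors the check_lt() pattern above: the earlier frame's timestamp must
    // be strictly less than the corresponding timestamp of the following frame.
    static void check_lt(ResultCollector& result, int64_t earlier, int64_t later,
                         const char* msg) {
      if (!(earlier < later)) result.fail(msg);
    }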
/external/perfetto/src/ipc/

buffered_frame_deserializer_unittest.cc
    328  std::vector<char> frame2 = GetSimpleFrame(kMaxCapacity);  [in TEST(), local]
    330  frame2.begin(),  [in TEST()]
    331  frame2.begin() + static_cast<ptrdiff_t>(kMaxCapacity - frame1.size()));  [in TEST()]
    333  frame2.begin() + static_cast<ptrdiff_t>(frame2_chunk1.size()),  [in TEST()]
    334  frame2.end());  [in TEST()]
    366  ASSERT_TRUE(FrameEq(frame2, *decoded_frame_2));  [in TEST()]
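The test above splits a serialized frame that does not fit in the remaining buffer capacity into two chunks and feeds them to the deserializer separately. A self-contained sketch of just the splitting arithmetic visible in the snippet; the helper name is hypothetical and the deserializer API itself is not shown.

    #include <cstddef>
    #include <utility>
    #include <vector>

    // Split a serialized frame into a first chunk of the requested size and a
    // second chunk holding the remainder.
    static std::pair<std::vector<char>, std::vector<char>> SplitFrame(
        const std::vector<char>& frame, size_t first_chunk_size) {
      std::vector<char> chunk1(
          frame.begin(),
          frame.begin() + static_cast<ptrdiff_t>(first_chunk_size));
      std::vector<char> chunk2(
          frame.begin() + static_cast<ptrdiff_t>(first_chunk_size), frame.end());
      return {chunk1, chunk2};
    }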
/external/webrtc/audio/utility/

channel_mixer_unittest.cc
    129  void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {  [in VerifyFramesAreEqual(), argument]
    130  EXPECT_EQ(frame1.num_channels(), frame2.num_channels());  [in VerifyFramesAreEqual()]
    131  EXPECT_EQ(frame1.samples_per_channel(), frame2.samples_per_channel());  [in VerifyFramesAreEqual()]
    133  const int16_t* frame2_data = frame2.data();  [in VerifyFramesAreEqual()]
    138  EXPECT_EQ(frame1.muted(), frame2.muted());  [in VerifyFramesAreEqual()]

audio_frame_operations_unittest.cc
     62  void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {  [in VerifyFramesAreEqual(), argument]
     63  EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);  [in VerifyFramesAreEqual()]
     64  EXPECT_EQ(frame1.samples_per_channel_, frame2.samples_per_channel_);  [in VerifyFramesAreEqual()]
     66  const int16_t* frame2_data = frame2.data();  [in VerifyFramesAreEqual()]
     71  EXPECT_EQ(frame1.muted(), frame2.muted());  [in VerifyFramesAreEqual()]
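Both VerifyFramesAreEqual() helpers above first match the channel count and samples per channel, then compare the raw sample data. A self-contained sketch of that pattern; the include paths are assumed and the helper name is hypothetical.

    #include <cstddef>
    #include <cstdint>
    #include "api/audio/audio_frame.h"  // assumed include path
    #include "test/gtest.h"             // assumed include path

    // Compare two frames sample by sample after checking their layout matches.
    void ExpectSameSamples(const webrtc::AudioFrame& frame1,
                           const webrtc::AudioFrame& frame2) {
      ASSERT_EQ(frame1.num_channels(), frame2.num_channels());
      ASSERT_EQ(frame1.samples_per_channel(), frame2.samples_per_channel());
      const int16_t* frame1_data = frame1.data();
      const int16_t* frame2_data = frame2.data();
      const size_t num_samples =
          frame1.samples_per_channel() * frame1.num_channels();
      for (size_t i = 0; i < num_samples; ++i) {
        EXPECT_EQ(frame1_data[i], frame2_data[i]);
      }
      EXPECT_EQ(frame1.muted(), frame2.muted());
    }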
/external/llvm/test/Transforms/LICM/

funclet.ll
     67  %.frame2 = alloca i8, align 4
     69  %bc2 = bitcast i8* %.frame2 to i32*
/external/webrtc/media/base/

video_broadcaster_unittest.cc
    224  webrtc::VideoFrame frame2 = webrtc::VideoFrame::Builder()  [in TEST(), local]
    229  broadcaster.OnFrame(frame2);  [in TEST()]
/external/skia/tests/

PathOpsConicIntersectionTest.cpp
    193  const SkDConic frame2[] = {  [variable]
    229  frame0, frame1, frame2, frame3, frame4, frame5, frame6
    233  (int) SK_ARRAY_COUNT(frame2), (int) SK_ARRAY_COUNT(frame3),