/external/libvpx/libvpx/vp8/common/ |
D | swapyv12buffer.c |
    13  void vp8_swap_yv12_buffer(YV12_BUFFER_CONFIG *new_frame,  in vp8_swap_yv12_buffer() argument
    18  last_frame->buffer_alloc = new_frame->buffer_alloc;  in vp8_swap_yv12_buffer()
    19  new_frame->buffer_alloc = temp;  in vp8_swap_yv12_buffer()
    22  last_frame->y_buffer = new_frame->y_buffer;  in vp8_swap_yv12_buffer()
    23  new_frame->y_buffer = temp;  in vp8_swap_yv12_buffer()
    26  last_frame->u_buffer = new_frame->u_buffer;  in vp8_swap_yv12_buffer()
    27  new_frame->u_buffer = temp;  in vp8_swap_yv12_buffer()
    30  last_frame->v_buffer = new_frame->v_buffer;  in vp8_swap_yv12_buffer()
    31  new_frame->v_buffer = temp;  in vp8_swap_yv12_buffer()
|
D | swapyv12buffer.h | 20 void vp8_swap_yv12_buffer(YV12_BUFFER_CONFIG *new_frame,
|
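
For reference, the pattern these libvpx hits excerpt: the frame swap exchanges raw buffer pointers instead of copying pixel data, so handing the reconstructed frame back as the new last frame is O(1) at any resolution. A minimal self-contained sketch; the struct below models only the fields visible above, not libvpx's full YV12_BUFFER_CONFIG:

    #include <cstdint>

    // Simplified stand-in for YV12_BUFFER_CONFIG; only the fields
    // that appear in the excerpt above are modeled.
    struct Yv12Buffer {
      uint8_t* buffer_alloc;
      uint8_t* y_buffer;
      uint8_t* u_buffer;
      uint8_t* v_buffer;
    };

    // Swap frames by exchanging pointers, mirroring the temp/assign
    // sequence in vp8_swap_yv12_buffer().
    void SwapYv12(Yv12Buffer* new_frame, Yv12Buffer* last_frame) {
      uint8_t* temp = last_frame->buffer_alloc;
      last_frame->buffer_alloc = new_frame->buffer_alloc;
      new_frame->buffer_alloc = temp;

      temp = last_frame->y_buffer;
      last_frame->y_buffer = new_frame->y_buffer;
      new_frame->y_buffer = temp;

      temp = last_frame->u_buffer;
      last_frame->u_buffer = new_frame->u_buffer;
      new_frame->u_buffer = temp;

      temp = last_frame->v_buffer;
      last_frame->v_buffer = new_frame->v_buffer;
      new_frame->v_buffer = temp;
    }
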
/external/google-breakpad/src/processor/ |
D | stackwalker_amd64.cc |
    255  scoped_ptr<StackFrameAMD64> new_frame;  in GetCallerFrame() local
    261  new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get()));  in GetCallerFrame()
    264  if (!new_frame.get()) {  in GetCallerFrame()
    265  new_frame.reset(GetCallerByFramePointerRecovery(frames));  in GetCallerFrame()
    269  if (stack_scan_allowed && !new_frame.get()) {  in GetCallerFrame()
    270  new_frame.reset(GetCallerByStackScan(frames));  in GetCallerFrame()
    274  if (!new_frame.get())  in GetCallerFrame()
    282  new_frame->context.rip = static_cast<uint32_t>(new_frame->context.rip);  in GetCallerFrame()
    283  new_frame->context.rsp = static_cast<uint32_t>(new_frame->context.rsp);  in GetCallerFrame()
    284  new_frame->context.rbp = static_cast<uint32_t>(new_frame->context.rbp);  in GetCallerFrame()
    [all …]
|
D | stackwalker_x86.cc |
    627  scoped_ptr<StackFrameX86> new_frame;  in GetCallerFrame() local
    633  new_frame.reset(GetCallerByWindowsFrameInfo(frames, windows_frame_info,  in GetCallerFrame()
    637  if (!new_frame.get()) {  in GetCallerFrame()
    641  new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info));  in GetCallerFrame()
    645  if (!new_frame.get())  in GetCallerFrame()
    646  new_frame.reset(GetCallerByEBPAtBase(frames, stack_scan_allowed));  in GetCallerFrame()
    649  if (!new_frame.get())  in GetCallerFrame()
    653  if (new_frame->context.eip == 0)  in GetCallerFrame()
    659  if (new_frame->context.esp <= last_frame->context.esp)  in GetCallerFrame()
    667  new_frame->instruction = new_frame->context.eip - 1;  in GetCallerFrame()
    [all …]
|
D | stackwalker_mips.cc |
    177  scoped_ptr<StackFrameMIPS> new_frame;  in GetCallerFrame() local
    183  new_frame.reset(GetCallerByCFIFrameInfo(frames, cfi_frame_info.get()));  in GetCallerFrame()
    186  if (stack_scan_allowed && !new_frame.get()) {  in GetCallerFrame()
    187  new_frame.reset(GetCallerByStackScan(frames));  in GetCallerFrame()
    191  if (!new_frame.get()) {  in GetCallerFrame()
    196  if (new_frame->context.epc == 0) {  in GetCallerFrame()
    203  if (new_frame->context.iregs[MD_CONTEXT_MIPS_REG_SP] <=  in GetCallerFrame()
    208  return new_frame.release();  in GetCallerFrame()
|
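
All three Breakpad stackwalkers above follow the same recovery ladder in GetCallerFrame(): use compiler-emitted CFI unwind data when present, fall back to frame-pointer recovery (or the EBP heuristic on x86), and only scan the stack as a last resort, and only when stack_scan_allowed. A hedged sketch of that control flow; the Unwind* stubs stand in for Breakpad's GetCallerBy* methods and are not its real API:

    #include <memory>

    struct Frame {};  // stand-in for StackFrameAMD64 / StackFrameX86 / StackFrameMIPS

    // Stubs in the role of GetCallerByCFIFrameInfo,
    // GetCallerByFramePointerRecovery and GetCallerByStackScan.
    Frame* UnwindViaCfi() { return nullptr; }
    Frame* UnwindViaFramePointer() { return nullptr; }
    Frame* UnwindViaStackScan() { return nullptr; }

    Frame* GetCallerFrame(bool stack_scan_allowed) {
      std::unique_ptr<Frame> new_frame;

      // 1. CFI unwind data is precise when available; prefer it.
      new_frame.reset(UnwindViaCfi());

      // 2. Otherwise follow the saved frame-pointer chain.
      if (!new_frame) new_frame.reset(UnwindViaFramePointer());

      // 3. Last resort: scan stack memory for plausible return addresses.
      if (stack_scan_allowed && !new_frame) new_frame.reset(UnwindViaStackScan());

      // Returning null ends the walk. The excerpts also show sanity
      // checks on the result: a zero PC is rejected, and the new stack
      // pointer must lie strictly above the previous frame's.
      return new_frame.release();
    }
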
/external/webrtc/webrtc/common_video/ |
D | video_render_frames.cc |
    29  int32_t VideoRenderFrames::AddFrame(const VideoFrame& new_frame) {  in AddFrame() argument
    35  new_frame.render_time_ms() + KOldRenderTimestampMS < time_now) {  in AddFrame()
    41  new_frame.timestamp());  in AddFrame()
    45  if (new_frame.render_time_ms() > time_now + KFutureRenderTimestampMS) {  in AddFrame()
    48  __FUNCTION__, new_frame.timestamp());  in AddFrame()
    52  incoming_frames_.push_back(new_frame);  in AddFrame()
|
D | video_render_frames.h | 28 int32_t AddFrame(const VideoFrame& new_frame);
|
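
The AddFrame() hits above implement a render-time window: frames whose render time is far in the past are dropped, implausibly far-future timestamps are rejected, and everything else is queued. A small sketch of that gatekeeping; the constants and frame type are illustrative stand-ins for WebRTC's KOldRenderTimestampMS / KFutureRenderTimestampMS and VideoFrame:

    #include <cstdint>
    #include <deque>

    struct VideoFrame {
      int64_t render_time_ms;
      uint32_t timestamp;  // RTP timestamp, for logging in the real code
    };

    // Illustrative thresholds, not WebRTC's actual values.
    constexpr int64_t kOldRenderTimestampMs = 30000;
    constexpr int64_t kFutureRenderTimestampMs = 10000;

    class RenderQueue {
     public:
      // Returns 0 on success, -1 if the frame is rejected, mirroring
      // the accept/drop logic in AddFrame() above.
      int32_t AddFrame(const VideoFrame& new_frame, int64_t time_now_ms) {
        if (new_frame.render_time_ms + kOldRenderTimestampMs < time_now_ms) {
          return -1;  // stale: its render time has long passed
        }
        if (new_frame.render_time_ms > time_now_ms + kFutureRenderTimestampMs) {
          return -1;  // too far in the future: likely a bad timestamp
        }
        incoming_frames_.push_back(new_frame);
        return 0;
      }

     private:
      std::deque<VideoFrame> incoming_frames_;
    };
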
/external/tensorflow/tensorflow/examples/android/jni/object_tracking/ |
D | object_tracker.h |
    60  virtual void NextFrame(const uint8_t* const new_frame,  in NextFrame() argument
    63  NextFrame(new_frame, NULL, timestamp, alignment_matrix_2x3);  in NextFrame()
    74  virtual void NextFrame(const uint8_t* const new_frame,
    79  const uint8_t* const new_frame,
|
D | image_data.h |
    89  void SetData(const uint8_t* const new_frame, const int stride,  in SetData() argument
    91  SetData(new_frame, NULL, stride, timestamp, downsample_factor);  in SetData()
    94  void SetData(const uint8_t* const new_frame, const uint8_t* const uv_frame,  in SetData() argument
    103  pyramid_sqrt2_[0]->FromArray(new_frame, stride, downsample_factor);  in SetData()
|
D | flow_cache.h |
    52  void NextFrame(ImageData* const new_frame,  in NextFrame() argument
    56  optical_flow_.NextFrame(new_frame);  in NextFrame()
|
D | object_tracker.cc |
    100  void ObjectTracker::NextFrame(const uint8_t* const new_frame,  in NextFrame() argument
    119  frame2_->SetData(new_frame, uv_frame, frame_width_, timestamp, 1);  in NextFrame()
    174  const std::string& id, const uint8_t* const new_frame,  in RegisterNewObjectWithAppearance() argument
    179  image.FromArray(new_frame, frame_width_, 1);  in RegisterNewObjectWithAppearance()
|
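
object_tracker.h, image_data.h and flow_cache.h above all use the same convenience-overload idiom: a short NextFrame()/SetData() that forwards to the full overload with NULL for the optional UV plane, so grayscale-only callers need not mention it. A minimal sketch of the idiom; the class and the skipped pyramid step are simplified placeholders, not the tracker's real types:

    #include <cstdint>

    class ImageData {
     public:
      // Grayscale-only convenience overload: forwards with no UV plane,
      // exactly as the SetData()/NextFrame() pairs in the excerpts do.
      void SetData(const uint8_t* const new_frame, int stride,
                   int64_t timestamp, int downsample_factor) {
        SetData(new_frame, /*uv_frame=*/nullptr, stride, timestamp,
                downsample_factor);
      }

      void SetData(const uint8_t* const new_frame,
                   const uint8_t* const uv_frame, int stride,
                   int64_t timestamp, int downsample_factor) {
        // The real implementation builds image pyramids from new_frame
        // here; omitted in this sketch.
        (void)new_frame; (void)uv_frame; (void)stride;
        (void)timestamp; (void)downsample_factor;
      }
    };
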
/external/webrtc/talk/media/webrtc/ |
D | webrtcvideoframe.cc |
    152  WebRtcVideoFrame* new_frame = new WebRtcVideoFrame(  in Copy() local
    154  new_frame->pixel_width_ = pixel_width_;  in Copy()
    155  new_frame->pixel_height_ = pixel_height_;  in Copy()
    156  return new_frame;  in Copy()
|
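
Copy() above is a manual clone: heap-allocate a new frame, then copy across the fields the constructor does not take. A stripped-down sketch of that shape, keeping only the two fields visible in the excerpt (the class is a placeholder, not WebRtcVideoFrame):

    class FrameBuf {
     public:
      // Clone in the style of WebRtcVideoFrame::Copy(): construct a new
      // instance, then copy fields the constructor leaves at defaults.
      FrameBuf* Copy() const {
        FrameBuf* new_frame = new FrameBuf();
        new_frame->pixel_width_ = pixel_width_;
        new_frame->pixel_height_ = pixel_height_;
        return new_frame;  // caller takes ownership
      }

     private:
      int pixel_width_ = 1;
      int pixel_height_ = 1;
    };
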
/external/python/cpython2/Lib/hotshot/ |
D | stats.py |
    40  frame = self.new_frame(filename, lineno, funcname)
    54  def new_frame(self, *args):  member in StatsLoader
|
/external/webrtc/webrtc/modules/desktop_capture/ |
D | screen_capturer_mac.mm |
    439  DesktopFrame* new_frame = queue_.current_frame()->Share();
    440  *new_frame->mutable_updated_region() = region;
    443  new_frame = new InvertedDesktopFrame(new_frame);
    445  helper_.set_size_most_recent(new_frame->size());
    451  new_frame->set_capture_time_ms(
    453  callback_->OnCaptureCompleted(new_frame);
|
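
The notable move in the screen_capturer_mac.mm excerpt is line 443: wrapping the shared frame in an InvertedDesktopFrame, a decorator that presents the same pixel data bottom-up and takes ownership of the frame it wraps. A hedged sketch of that decorator shape; the types here are stand-ins, not WebRTC's DesktopFrame API:

    #include <memory>

    // Stand-in for webrtc::DesktopFrame.
    class FrameBase {
     public:
      virtual ~FrameBase() = default;
      virtual const unsigned char* data() const = 0;
      virtual int stride() const = 0;  // bytes per row; negative = bottom-up
    };

    // Decorator in the role of InvertedDesktopFrame: owns the wrapped
    // frame and flips row order by pointing at the last row and
    // negating the stride, so no pixels are copied.
    class InvertedFrame : public FrameBase {
     public:
      InvertedFrame(FrameBase* frame, int height)
          : inner_(frame), height_(height) {}
      const unsigned char* data() const override {
        return inner_->data() + (height_ - 1) * inner_->stride();
      }
      int stride() const override { return -inner_->stride(); }

     private:
      std::unique_ptr<FrameBase> inner_;
      int height_;
    };
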
/external/libvpx/libvpx/third_party/libwebm/mkvmuxer/ |
D | mkvmuxer.cc |
    3524  Frame* const new_frame = new (std::nothrow) Frame();  in AddGenericFrame() local
    3525  if (!new_frame || !new_frame->CopyFrom(*frame)) {  in AddGenericFrame()
    3526  delete new_frame;  in AddGenericFrame()
    3529  if (!QueueFrame(new_frame)) {  in AddGenericFrame()
    3530  delete new_frame;  in AddGenericFrame()
    3554  Frame* const new_frame = new (std::nothrow) Frame();  in AddGenericFrame() local
    3555  if (!new_frame || !new_frame->CopyFrom(*frame)) {  in AddGenericFrame()
    3556  delete new_frame;  in AddGenericFrame()
    3559  new_frame->set_reference_block_timestamp(  in AddGenericFrame()
    3561  frame = new_frame;  in AddGenericFrame()
|
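
Both AddGenericFrame() excerpts use the same defensive-copy pattern: allocate with new (std::nothrow), treat a failed allocation and a failed CopyFrom() identically, and delete the copy on every failure path so nothing leaks. A minimal sketch of the pattern; the Frame here is a simplified stand-in for mkvmuxer::Frame:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <new>

    // Simplified stand-in for mkvmuxer::Frame.
    class Frame {
     public:
      ~Frame() { delete[] data_; }

      // Returns false if the payload copy cannot be allocated, matching
      // how the excerpt treats a CopyFrom() failure.
      bool CopyFrom(const Frame& other) {
        delete[] data_;
        data_ = nullptr;
        size_ = 0;
        if (other.size_ == 0) return true;
        data_ = new (std::nothrow) uint8_t[other.size_];
        if (!data_) return false;
        std::memcpy(data_, other.data_, other.size_);
        size_ = other.size_;
        return true;
      }

     private:
      uint8_t* data_ = nullptr;
      size_t size_ = 0;
    };

    bool AddGenericFrame(const Frame* frame) {
      Frame* const new_frame = new (std::nothrow) Frame();
      // One delete covers both failure modes: deleting null is safe,
      // and a half-copied frame must not leak.
      if (!new_frame || !new_frame->CopyFrom(*frame)) {
        delete new_frame;
        return false;
      }
      delete new_frame;  // sketch only: the real code queues new_frame instead
      return true;
    }
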
/external/scapy/doc/notebooks/ |
D | HTTP_2_Tuto.ipynb |
    328  "new_frame = None\n",
    329  "while isinstance(new_frame, type(None)) or not (\n",
    330  " new_frame.type == h2.H2SettingsFrame.type_id \n",
    331  " and 'A' in new_frame.flags\n",
    333  " if not isinstance(new_frame, type(None)):\n",
    335  " if new_frame.type == h2.H2WindowUpdateFrame.type_id:\n",
    337  " if new_frame.stream_id == 0:\n",
    338  " srv_global_window += new_frame.payload.win_size_incr\n",
    341  " elif new_frame.type == h2.H2PingFrame.type_id:\n",
    342  " new_flags = new_frame.getfieldval('flags')\n",
    [all …]
|
/external/webrtc/webrtc/modules/video_coding/ |
D | jitter_buffer.h | 252 void FindAndInsertContinuousFrames(const VCMFrameBuffer& new_frame)
|
D | jitter_buffer.cc |
    874  const VCMFrameBuffer& new_frame) {  in FindAndInsertContinuousFrames() argument
    877  decoding_state.SetState(&new_frame);  in FindAndInsertContinuousFrames()
|
/external/webrtc/webrtc/video/ |
D | video_send_stream_tests.cc |
    2092  bool new_frame = packets_sent_ == 0 ||  in CompareConsecutiveFrames() local
    2094  EXPECT_EQ(new_frame, video.isFirstPacket);  in CompareConsecutiveFrames()
    2095  if (!new_frame) {  in CompareConsecutiveFrames()
|
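
The test above pins down when a packet opens a new frame: it is either the first packet the test has observed or its RTP timestamp differs from the previous packet's, and that must agree with the packetizer's isFirstPacket flag. The predicate as a tiny sketch (the function name and parameters are mine, not the test fixture's):

    #include <cstdint>

    // A packet starts a new frame iff nothing was sent yet or the RTP
    // timestamp changed; every packet within one frame shares a timestamp.
    bool StartsNewFrame(uint64_t packets_sent, uint32_t rtp_timestamp,
                        uint32_t last_rtp_timestamp) {
      return packets_sent == 0 || rtp_timestamp != last_rtp_timestamp;
    }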