/device/generic/vulkan-cereal/host-common/ |
D | MediaTexturePool.cpp |
      73  TextureFrame frame{textures[2 * i], textures[2 * i + 1]};   in getTextureFrame() local
      79  TextureFrame frame = ph->front();   in getTextureFrame() local
      86  void MediaTexturePool::saveDecodedFrameToTexture(TextureFrame frame,   in saveDecodedFrameToTexture()
      98  void MediaTexturePool::putTextureFrame(TextureFrame frame) {   in putTextureFrame()
     117  void MediaTexturePool::deleteTextures(TextureFrame frame) {   in deleteTextures()
     134  for (auto& frame : myFramePool) {   in cleanUpTextures() local
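The hits above outline a recycling pool: getTextureFrame() hands out a pair of GL texture names per decoded frame and putTextureFrame() returns the pair for reuse. A minimal sketch of that pattern follows; the struct fields and member names used here (Ytex/UVtex, mFree) are assumptions for illustration, not the emulator's actual layout.

    #include <GLES2/gl2.h>
    #include <deque>

    // Illustrative texture-pair pool; the real MediaTexturePool keeps more
    // bookkeeping, and the field names here are guesses.
    struct TextureFrame {
        GLuint Ytex = 0;   // luma-plane texture name
        GLuint UVtex = 0;  // chroma-plane texture name
    };

    class SimpleTexturePool {
    public:
        // Reuse a recycled pair when available, otherwise create two textures.
        TextureFrame getTextureFrame() {
            if (!mFree.empty()) {
                TextureFrame f = mFree.front();
                mFree.pop_front();
                return f;
            }
            GLuint tex[2] = {0, 0};
            glGenTextures(2, tex);  // assumes a current GL context
            return TextureFrame{tex[0], tex[1]};
        }

        // Hand a pair back once the renderer is done with it.
        void putTextureFrame(TextureFrame f) { mFree.push_back(f); }

        ~SimpleTexturePool() {
            for (auto& f : mFree) {
                GLuint tex[2] = {f.Ytex, f.UVtex};
                glDeleteTextures(2, tex);
            }
        }

    private:
        std::deque<TextureFrame> mFree;
    };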
|
D | H264NaluParser.cpp |
      85  bool H264NaluParser::checkSpsFrame(const uint8_t* frame, size_t szBytes) {   in checkSpsFrame()
      95  bool H264NaluParser::checkIFrame(const uint8_t* frame, size_t szBytes) {   in checkIFrame()
     105  bool H264NaluParser::checkPpsFrame(const uint8_t* frame, size_t szBytes) {   in checkPpsFrame()
     115  H264NaluParser::H264NaluType H264NaluParser::getFrameNaluType(const uint8_t* frame, size_t szBytes,…   in getFrameNaluType()
     159  const uint8_t* H264NaluParser::getNextStartCodeHeader(const uint8_t* frame, size_t szBytes) {   in getNextStartCodeHeader()
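These parser entry points all reduce to scanning an Annex B stream for a start code (00 00 01 or 00 00 00 01) and reading the NAL unit type from the low five bits of the byte that follows (7 = SPS, 8 = PPS, 5 = IDR slice). A hedged, stand-alone sketch of that check, not the parser's actual code:

    #include <cstddef>
    #include <cstdint>

    // Return a pointer to the byte right after the next Annex B start code
    // (00 00 01, which also matches the tail of 00 00 00 01), or nullptr.
    static const uint8_t* nextNaluPayload(const uint8_t* data, size_t size) {
        for (size_t i = 0; i + 3 <= size; ++i) {
            if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 1) {
                return (i + 3 < size) ? data + i + 3 : nullptr;
            }
        }
        return nullptr;
    }

    // H.264 NAL unit type is the low 5 bits of the first payload byte.
    static bool isSps(const uint8_t* data, size_t size) {
        const uint8_t* p = nextNaluPayload(data, size);
        return p != nullptr && (*p & 0x1f) == 7;  // 7 = SPS, 8 = PPS, 5 = IDR
    }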
|
D | MediaSnapshotState.cpp |
      42  bool MediaSnapshotState::savePacket(const uint8_t* frame,   in savePacket()
      61  FrameInfo frame{   in saveDecodedFrame() local
      73  FrameInfo frame{std::vector<uint8_t>{},   in saveDecodedFrame() local
     113  void MediaSnapshotState::loadFrameInfo(base::Stream* stream, FrameInfo& frame) {   in loadFrameInfo()
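savePacket()/saveDecodedFrame()/loadFrameInfo() persist in-flight packets and decoded frames across emulator snapshots. A toy illustration of round-tripping such a FrameInfo record, using plain iostreams instead of the emulator's base::Stream and with a guessed field set:

    #include <cstdint>
    #include <istream>
    #include <ostream>
    #include <vector>

    // Toy FrameInfo plus length-prefixed (de)serialization; fields are a guess.
    struct FrameInfo {
        std::vector<uint8_t> data;  // compressed packet or decoded pixels
        uint32_t width = 0;
        uint32_t height = 0;
        uint64_t pts = 0;
    };

    static void saveFrameInfo(std::ostream& out, const FrameInfo& f) {
        const uint64_t len = f.data.size();
        out.write(reinterpret_cast<const char*>(&len), sizeof(len));
        if (len) out.write(reinterpret_cast<const char*>(f.data.data()), len);
        out.write(reinterpret_cast<const char*>(&f.width), sizeof(f.width));
        out.write(reinterpret_cast<const char*>(&f.height), sizeof(f.height));
        out.write(reinterpret_cast<const char*>(&f.pts), sizeof(f.pts));
    }

    static void loadFrameInfo(std::istream& in, FrameInfo& f) {
        uint64_t len = 0;
        in.read(reinterpret_cast<char*>(&len), sizeof(len));
        f.data.resize(len);
        if (len) in.read(reinterpret_cast<char*>(f.data.data()), len);
        in.read(reinterpret_cast<char*>(&f.width), sizeof(f.width));
        in.read(reinterpret_cast<char*>(&f.height), sizeof(f.height));
        in.read(reinterpret_cast<char*>(&f.pts), sizeof(f.pts));
    }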
|
D | MediaHostRenderer.cpp |
      61  void MediaHostRenderer::saveDecodedFrameToTexture(TextureFrame frame,   in saveDecodedFrameToTexture()
      94  TextureFrame frame) {   in renderToHostColorBufferWithTextures()
|
D | MediaSnapshotHelper.cpp |
      35  void MediaSnapshotHelper::savePacket(const uint8_t* frame,   in savePacket()
      72  void MediaSnapshotHelper::saveH264Packet(const uint8_t* frame,   in saveH264Packet()
|
D | MediaH264DecoderGeneric.cpp |
     201  const uint8_t* frame = param.pData;   in decodeFrame() local
     264  MediaSnapshotState::FrameInfo frame;   in fetchAllFrames() local
     373  MediaSnapshotState::FrameInfo frame;   in oneShotDecode() local
|
D | MediaHostRenderer.h | 42 void putTextureFrame(MediaTexturePool::TextureFrame frame) { in putTextureFrame()
|
D | MediaH264DecoderFfmpeg.cpp |
     156  bool MediaH264DecoderFfmpeg::checkWhetherConfigChanged(const uint8_t* frame, size_t szBytes) {   in checkWhetherConfigChanged()
     194  const uint8_t* frame,   in decodeFrameDirect()
     212  const uint8_t* frame = param.pData;   in decodeFrameInternal() local
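In the ffmpeg-backed decoder, decodeFrameInternal() takes the guest's compressed buffer (param.pData) and pushes it through libavcodec. A self-contained sketch of that send/receive flow, assuming a context already opened with avcodec_open2() for AV_CODEC_ID_H264; this is the standard libavcodec API, not the decoder's exact code:

    extern "C" {
    #include <libavcodec/avcodec.h>
    }
    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    // Decode one H.264 access unit; returns true if a picture came out.
    // Error handling is trimmed for brevity.
    static bool decodeOne(AVCodecContext* ctx, const uint8_t* data, size_t size,
                          AVFrame* out) {
        AVPacket* pkt = av_packet_alloc();
        if (!pkt || av_new_packet(pkt, static_cast<int>(size)) < 0) {
            av_packet_free(&pkt);
            return false;
        }
        std::memcpy(pkt->data, data, size);

        bool gotFrame = false;
        if (avcodec_send_packet(ctx, pkt) == 0) {
            // 0 means a decoded frame is ready in 'out'; AVERROR(EAGAIN)
            // means the decoder needs more input before it can emit one.
            gotFrame = (avcodec_receive_frame(ctx, out) == 0);
        }
        av_packet_free(&pkt);
        return gotFrame;
    }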
|
D | MediaVpxDecoderGeneric.cpp |
     234  MediaSnapshotState::FrameInfo frame;   in fetchAllFrames() local
     333  MediaSnapshotState::FrameInfo frame;   in oneShotDecode() local
|
D | MediaVideoHelper.h | 39 virtual void decode(const uint8_t* frame, in decode()
|
D | MediaSnapshotHelper.h | 63 void saveDecodedFrame(MediaSnapshotState::FrameInfo frame) { in saveDecodedFrame()
|
D | MediaH264DecoderCuvid.cpp |
     212  const uint8_t* frame = param.pData;   in decodeFrame() local
     248  const uint8_t* frame,   in decodeFrameInternal()
     775  auto& frame = mSnapshotState.savedFrames[i];   in load() local
|
/device/google/cuttlefish/host/frontend/webrtc/lib/ |
D | camera_streamer.cpp |
      56  auto frame = client_frame.video_frame_buffer()->ToI420().get();   in OnFrame() local
     107  const webrtc::I420BufferInterface* frame) {   in VsockSendYUVFrame()
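OnFrame() converts the incoming webrtc::VideoFrame to I420 and VsockSendYUVFrame() ships the Y/U/V planes to the guest camera HAL. A rough sketch of reading those planes out of a WebRTC sink callback; SendPlane() is a hypothetical stand-in for the vsock write path:

    #include <cstdint>
    #include <api/video/video_frame.h>
    #include <api/video/video_sink_interface.h>

    // Sink that unwraps each delivered frame into I420 planes.
    class YuvSink : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
     public:
      void OnFrame(const webrtc::VideoFrame& client_frame) override {
        // Keep the scoped_refptr alive while the planes are read.
        rtc::scoped_refptr<webrtc::I420BufferInterface> i420 =
            client_frame.video_frame_buffer()->ToI420();
        SendPlane(i420->DataY(), i420->StrideY(), i420->width(), i420->height());
        SendPlane(i420->DataU(), i420->StrideU(), i420->ChromaWidth(),
                  i420->ChromaHeight());
        SendPlane(i420->DataV(), i420->StrideV(), i420->ChromaWidth(),
                  i420->ChromaHeight());
      }

     private:
      // Hypothetical helper; the real code writes the rows to a vsock socket.
      void SendPlane(const uint8_t* data, int stride, int width, int height);
    };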
|
D | audio_track_source_impl.h | 77 void OnFrame(std::shared_ptr<AudioFrameBuffer> frame, in OnFrame()
|
D | local_recorder.cpp |
     193  void LocalRecorder::Display::OnFrame(const webrtc::VideoFrame& frame) {   in OnFrame()
     209  std::unique_ptr<webrtc::VideoFrame> frame;   in EncoderLoop() local
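Display::OnFrame() runs on WebRTC's delivery thread while EncoderLoop() consumes frames on its own thread, so the interesting part is the hand-off between the two. A generic mutex/condition-variable frame queue in the same spirit; the names are illustrative, not LocalRecorder's members:

    #include <condition_variable>
    #include <memory>
    #include <mutex>
    #include <queue>

    // Generic frame hand-off between a capture callback and an encoder thread.
    template <typename Frame>
    class FrameQueue {
    public:
        void push(std::unique_ptr<Frame> f) {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mFrames.push(std::move(f));
            }
            mCond.notify_one();
        }

        // Blocks until a frame arrives or stop() is called; may return nullptr.
        std::unique_ptr<Frame> pop() {
            std::unique_lock<std::mutex> lock(mMutex);
            mCond.wait(lock, [this] { return mStopped || !mFrames.empty(); });
            if (mFrames.empty()) return nullptr;
            auto f = std::move(mFrames.front());
            mFrames.pop();
            return f;
        }

        void stop() {
            { std::lock_guard<std::mutex> lock(mMutex); mStopped = true; }
            mCond.notify_all();
        }

    private:
        std::mutex mMutex;
        std::condition_variable mCond;
        std::queue<std::unique_ptr<Frame>> mFrames;
        bool mStopped = false;
    };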
|
D | video_track_source_impl.h | 67 void OnFrame(std::shared_ptr<VideoFrameBuffer> frame, in OnFrame()
|
D | video_track_source_impl.cpp | 57 void VideoTrackSourceImpl::OnFrame(std::shared_ptr<VideoFrameBuffer> frame, in OnFrame()
|
D | audio_track_source_impl.cpp | 54 void AudioTrackSourceImpl::OnFrame(std::shared_ptr<AudioFrameBuffer> frame, in OnFrame()
|
/device/generic/goldfish/camera/ |
D | EmulatedQemuCameraDevice.cpp |
     234  uint8_t* frame = frameBufferPair->first;   in getCurrentFrame() local
     284  uint8_t* frame = frameBufferPair->first;   in getCurrentFrame() local
|
D | QemuClient.cpp |
     534  const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);   in queryFrame() local
     599  const uint8_t* frame = reinterpret_cast<const uint8_t*>(query.mReplyData);   in queryFrame() local
|
/device/generic/goldfish-opengl/system/hwc2/ |
D | Layer.cpp |
     137  HWC2::Error Layer::setDisplayFrame(hwc_rect_t frame) {   in setDisplayFrame()
     148  auto frame = mDisplayFrame;   in getDisplayFrame() local
|
/device/generic/goldfish-opengl/system/hwc3/ |
D | Layer.cpp |
     163  HWC3::Error Layer::setDisplayFrame(common::Rect frame) {   in setDisplayFrame()
     174  auto frame = mDisplayFrame;   in getDisplayFrame() local
|
/device/generic/goldfish-opengl/system/codecs/c2/decoders/avcdec/ |
D | GoldfishH264Helper.cpp |
     171  bool GoldfishH264Helper::isSpsFrame(const uint8_t* frame, int inSize) {   in isSpsFrame()
     189  bool GoldfishH264Helper::decodeHeader(const uint8_t *frame, int inSize) {   in decodeHeader()
|
/device/generic/goldfish-opengl/system/codecs/c2/decoders/hevcdec/ |
D | GoldfishHevcHelper.cpp |
     172  bool GoldfishHevcHelper::isVpsFrame(const uint8_t* frame, int inSize) {   in isVpsFrame()
     191  bool GoldfishHevcHelper::decodeHeader(const uint8_t *frame, int inSize,   in decodeHeader()
|
/device/generic/vulkan-cereal/third-party/angle/samples/capture_replay/ |
D | CaptureReplay.cpp | 63 uint32_t frame = in draw() local
|