
Searched refs:frame_buffer (Results 1 – 25 of 51) sorted by relevance

/external/libgav1/libgav1/src/
yuv_buffer.cc
74 FrameBuffer frame_buffer; in Realloc() local
78 &frame_buffer) != kStatusOk) { in Realloc()
82 if (frame_buffer.plane[0] == nullptr || in Realloc()
83 (!is_monochrome && frame_buffer.plane[1] == nullptr) || in Realloc()
84 (!is_monochrome && frame_buffer.plane[2] == nullptr)) { in Realloc()
90 stride_[kPlaneY] = frame_buffer.stride[0]; in Realloc()
91 stride_[kPlaneU] = frame_buffer.stride[1]; in Realloc()
92 stride_[kPlaneV] = frame_buffer.stride[2]; in Realloc()
93 buffer_[kPlaneY] = frame_buffer.plane[0]; in Realloc()
94 buffer_[kPlaneU] = frame_buffer.plane[1]; in Realloc()
[all …]
frame_buffer.cc
128 Libgav1FrameBuffer* frame_buffer) { in Libgav1SetFrameBuffer() argument
132 frame_buffer == nullptr) { in Libgav1SetFrameBuffer()
139 frame_buffer->plane[0] = libgav1::AlignAddr(y_buffer + info->y_plane_offset, in Libgav1SetFrameBuffer()
141 frame_buffer->plane[1] = libgav1::AlignAddr(u_buffer + info->uv_plane_offset, in Libgav1SetFrameBuffer()
143 frame_buffer->plane[2] = libgav1::AlignAddr(v_buffer + info->uv_plane_offset, in Libgav1SetFrameBuffer()
145 frame_buffer->stride[0] = info->y_stride; in Libgav1SetFrameBuffer()
146 frame_buffer->stride[1] = frame_buffer->stride[2] = info->uv_stride; in Libgav1SetFrameBuffer()
147 frame_buffer->private_data = buffer_private_data; in Libgav1SetFrameBuffer()
internal_frame_buffer_list.cc
42 int bottom_border, int stride_alignment, Libgav1FrameBuffer* frame_buffer) { in GetInternalFrameBuffer() argument
47 top_border, bottom_border, stride_alignment, frame_buffer); in GetInternalFrameBuffer()
69 int stride_alignment, Libgav1FrameBuffer* frame_buffer) { in GetFrameBuffer() argument
111 frame_buffer); in GetFrameBuffer()
internal_frame_buffer_list.h
38 int bottom_border, int stride_alignment, Libgav1FrameBuffer* frame_buffer);
65 Libgav1FrameBuffer* frame_buffer);
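
The libgav1 matches above revolve around the frame-buffer callback contract: the decoder asks the application for a buffer, and the callback fills in aligned plane pointers, per-plane strides, and an opaque private_data handle it can use to release the memory later. Below is a minimal, self-contained sketch of that shape; the struct and the alignment helper are stand-ins written for this listing, not the actual libgav1 declarations, and it assumes 8-bit 4:2:0 with no borders.

#include <cstdint>
#include <cstdlib>

// Hypothetical stand-in for Libgav1FrameBuffer: three plane pointers,
// three strides, and an opaque handle used to free the allocation later.
struct ToyFrameBuffer {
  uint8_t* plane[3];
  int stride[3];
  void* private_data;
};

// Round a pointer up to the requested power-of-two alignment.
static uint8_t* AlignUp(uint8_t* p, size_t alignment) {
  uintptr_t addr = reinterpret_cast<uintptr_t>(p);
  addr = (addr + alignment - 1) & ~static_cast<uintptr_t>(alignment - 1);
  return reinterpret_cast<uint8_t*>(addr);
}

// Toy get-frame-buffer callback: one malloc backs all three planes.
bool ToyGetFrameBuffer(int width, int height, size_t alignment,
                       ToyFrameBuffer* frame_buffer) {
  const int y_stride = width;
  const int uv_stride = (width + 1) / 2;
  const int uv_height = (height + 1) / 2;
  const size_t y_size = static_cast<size_t>(y_stride) * height;
  const size_t uv_size = static_cast<size_t>(uv_stride) * uv_height;
  // Over-allocate so every plane can be aligned independently.
  uint8_t* data = static_cast<uint8_t*>(
      std::malloc(y_size + 2 * uv_size + 3 * alignment));
  if (data == nullptr) return false;
  frame_buffer->plane[0] = AlignUp(data, alignment);
  frame_buffer->plane[1] = AlignUp(frame_buffer->plane[0] + y_size, alignment);
  frame_buffer->plane[2] = AlignUp(frame_buffer->plane[1] + uv_size, alignment);
  frame_buffer->stride[0] = y_stride;
  frame_buffer->stride[1] = frame_buffer->stride[2] = uv_stride;
  frame_buffer->private_data = data;  // raw pointer, needed to free later
  return true;
}
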
/external/libgav1/libgav1/examples/
gav1_decode_cv_pixel_buffer_pool.cc
56 libgav1::FrameBuffer* frame_buffer) { in Gav1DecodeGetCVPixelBuffer() argument
61 top_border, bottom_border, stride_alignment, frame_buffer); in Gav1DecodeGetCVPixelBuffer()
192 libgav1::FrameBuffer* frame_buffer) { in GetCVPixelBuffer() argument
243 frame_buffer->stride[0] = in GetCVPixelBuffer()
245 frame_buffer->plane[0] = static_cast<uint8_t*>( in GetCVPixelBuffer()
248 frame_buffer->stride[1] = 0; in GetCVPixelBuffer()
249 frame_buffer->stride[2] = 0; in GetCVPixelBuffer()
250 frame_buffer->plane[1] = nullptr; in GetCVPixelBuffer()
251 frame_buffer->plane[2] = nullptr; in GetCVPixelBuffer()
253 frame_buffer->stride[1] = in GetCVPixelBuffer()
[all …]
file_writer.cc
123 bool FileWriter::WriteFrame(const DecoderBuffer& frame_buffer) { in WriteFrame() argument
134 (frame_buffer.bitdepth == 8) ? sizeof(uint8_t) : sizeof(uint16_t); in WriteFrame()
135 for (int plane_index = 0; plane_index < frame_buffer.NumPlanes(); in WriteFrame()
137 const int height = frame_buffer.displayed_height[plane_index]; in WriteFrame()
138 const int width = frame_buffer.displayed_width[plane_index]; in WriteFrame()
139 const int stride = frame_buffer.stride[plane_index]; in WriteFrame()
140 const uint8_t* const plane_pointer = frame_buffer.plane[plane_index]; in WriteFrame()
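
file_writer.cc above walks each plane of the decoded frame and writes width * pixel_size bytes per row while advancing the source pointer by the (possibly larger) stride. A hedged, self-contained version of that loop follows; ToyDecoderBuffer is a simplified stand-in for just the DecoderBuffer fields the snippet touches.

#include <cstdint>
#include <cstdio>

// Simplified stand-in for the fields WriteFrame() reads from DecoderBuffer.
struct ToyDecoderBuffer {
  int bitdepth;                  // 8, or 10/12 stored as 16-bit samples
  int num_planes;                // 1 for monochrome, 3 for YUV
  int displayed_width[3];
  int displayed_height[3];
  int stride[3];                 // bytes per row, may exceed width*pixel_size
  const uint8_t* plane[3];
};

// Write each plane row by row, skipping the stride padding between rows.
bool WritePlanarFrame(std::FILE* file, const ToyDecoderBuffer& frame_buffer) {
  const size_t pixel_size =
      (frame_buffer.bitdepth == 8) ? sizeof(uint8_t) : sizeof(uint16_t);
  for (int p = 0; p < frame_buffer.num_planes; ++p) {
    const uint8_t* row = frame_buffer.plane[p];
    const size_t row_bytes = frame_buffer.displayed_width[p] * pixel_size;
    for (int y = 0; y < frame_buffer.displayed_height[p]; ++y) {
      if (std::fwrite(row, 1, row_bytes, file) != row_bytes) return false;
      row += frame_buffer.stride[p];  // stride is in bytes here
    }
  }
  return true;
}
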
gav1_decode_cv_pixel_buffer_pool.h
36 libgav1::FrameBuffer* frame_buffer);
63 libgav1::FrameBuffer* frame_buffer);
/external/deqp-deps/amber/src/
verifier_test.cc
68 const uint8_t frame_buffer[3][3][4] = { in TEST_F() local
88 static_cast<const void*>(frame_buffer)); in TEST_F()
109 uint8_t frame_buffer[10][10][4] = {}; in TEST_F() local
112 frame_buffer[y][x][0] = 128; in TEST_F()
113 frame_buffer[y][x][1] = 64; in TEST_F()
114 frame_buffer[y][x][2] = 51; in TEST_F()
115 frame_buffer[y][x][3] = 204; in TEST_F()
121 static_cast<const void*>(frame_buffer)); in TEST_F()
142 uint8_t frame_buffer[250][250][4] = {}; in TEST_F() local
146 static_cast<const void*>(frame_buffer)); in TEST_F()
[all …]
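
The Amber tests above build small frame_buffer arrays of packed RGBA bytes ([rows][cols][4]) and hand them to the verifier as an opaque const void*. As a rough illustration of what such a check amounts to, here is a self-contained helper that compares every pixel of a packed RGBA8 buffer against one expected color; it is not the Amber Verifier API, just the same idea in miniature.

#include <cstddef>
#include <cstdint>

// Compare a tightly packed RGBA8 buffer (row-major, 4 bytes per pixel)
// against a single expected color, allowing a small per-channel tolerance.
bool AllPixelsMatch(const void* buffer, int width, int height,
                    const uint8_t expected[4], int tolerance = 0) {
  const uint8_t* px = static_cast<const uint8_t*>(buffer);
  for (int i = 0; i < width * height; ++i, px += 4) {
    for (int c = 0; c < 4; ++c) {
      const int diff = static_cast<int>(px[c]) - static_cast<int>(expected[c]);
      if (diff > tolerance || diff < -tolerance) return false;
    }
  }
  return true;
}

// Usage mirroring the test setup: a 10x10 buffer filled with one color.
// uint8_t frame_buffer[10][10][4] = { ... };
// const uint8_t expected[4] = {128, 64, 51, 204};
// bool ok = AllPixelsMatch(frame_buffer, 10, 10, expected);
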
/external/tensorflow/tensorflow/lite/experimental/microfrontend/
audio_microfrontend.cc
133 std::vector<std::vector<T>> frame_buffer(num_frames); in GenerateFeatures() local
144 frame_buffer[frame_index].reserve(output.size); in GenerateFeatures()
147 frame_buffer[frame_index].push_back(static_cast<T>(output.values[i]) / in GenerateFeatures()
157 for (anchor = 0; anchor < frame_buffer.size(); anchor += data->frame_stride) { in GenerateFeatures()
162 if (data->zero_padding && (frame < 0 || frame >= frame_buffer.size())) { in GenerateFeatures()
165 feature = &frame_buffer[0]; in GenerateFeatures()
166 } else if (frame >= frame_buffer.size()) { in GenerateFeatures()
167 feature = &frame_buffer[frame_buffer.size() - 1]; in GenerateFeatures()
169 feature = &frame_buffer[frame]; in GenerateFeatures()
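
Both microfrontend kernels collect one feature vector per frame into frame_buffer and then emit overlapping windows of left/right context frames, stepping the anchor by the frame stride; out-of-range frames are either zero-padded or clamped to the first/last frame. A self-contained sketch of that windowing, assuming one frame is simply a std::vector<float>:

#include <cstddef>
#include <utility>
#include <vector>

// Emit windows of [anchor - left_context, anchor + right_context] frames,
// stepping the anchor by frame_stride. Out-of-range frames are clamped to
// the first/last frame, or replaced by zeros when zero_padding is set.
std::vector<std::vector<float>> StrideFrames(
    const std::vector<std::vector<float>>& frame_buffer,
    int left_context, int right_context, int frame_stride, bool zero_padding) {
  std::vector<std::vector<float>> output;
  if (frame_buffer.empty()) return output;
  const int num_frames = static_cast<int>(frame_buffer.size());
  const size_t feature_size = frame_buffer[0].size();
  const std::vector<float> zeros(feature_size, 0.0f);
  for (int anchor = 0; anchor < num_frames; anchor += frame_stride) {
    std::vector<float> window;
    for (int frame = anchor - left_context; frame <= anchor + right_context;
         ++frame) {
      const std::vector<float>* feature;
      if (zero_padding && (frame < 0 || frame >= num_frames)) {
        feature = &zeros;                         // pad missing context
      } else if (frame < 0) {
        feature = &frame_buffer[0];               // clamp to first frame
      } else if (frame >= num_frames) {
        feature = &frame_buffer[num_frames - 1];  // clamp to last frame
      } else {
        feature = &frame_buffer[frame];
      }
      window.insert(window.end(), feature->begin(), feature->end());
    }
    output.push_back(std::move(window));
  }
  return output;
}
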
/external/tensorflow/tensorflow/lite/experimental/microfrontend/ops/
audio_microfrontend_op.cc
242 std::vector<std::vector<T>> frame_buffer(num_frames); in Compute() local
252 frame_buffer[frame_index].reserve(output.size); in Compute()
255 frame_buffer[frame_index].push_back(static_cast<T>(output.values[i]) / in Compute()
266 for (anchor = 0; anchor < frame_buffer.size(); anchor += frame_stride_) { in Compute()
271 if (zero_padding_ && (frame < 0 || frame >= frame_buffer.size())) { in Compute()
274 feature = &frame_buffer[0]; in Compute()
275 } else if (frame >= frame_buffer.size()) { in Compute()
276 feature = &frame_buffer[frame_buffer.size() - 1]; in Compute()
278 feature = &frame_buffer[frame]; in Compute()
/external/webrtc/webrtc/modules/video_coding/
session_info.h
50 uint8_t* frame_buffer,
59 size_t BuildVP8FragmentationHeader(uint8_t* frame_buffer,
119 size_t InsertBuffer(uint8_t* frame_buffer, PacketIterator packetIterator);
123 uint8_t* frame_buffer);
session_info.cc
157 size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer, in InsertBuffer() argument
170 packet.dataPtr = frame_buffer + offset; in InsertBuffer()
191 uint8_t* frame_buffer_ptr = frame_buffer + offset; in InsertBuffer()
215 uint8_t* frame_buffer) { in Insert() argument
218 memcpy(frame_buffer, startCode, kH264StartCodeLengthBytes); in Insert()
220 memcpy(frame_buffer + (insert_start_code ? kH264StartCodeLengthBytes : 0), in Insert()
334 uint8_t* frame_buffer, in BuildVP8FragmentationHeader() argument
351 (*it).dataPtr - frame_buffer; in BuildVP8FragmentationHeader()
471 uint8_t* frame_buffer, in InsertPacket() argument
546 size_t returnLength = InsertBuffer(frame_buffer, packet_list_it); in InsertPacket()
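
The WebRTC session code above copies each packet's payload into the session-owned frame_buffer, optionally prefixing an Annex-B start code for H.264. A minimal stand-alone version of that insert step (assuming the usual 4-byte start code 00 00 00 01) could look like this:

#include <cstddef>
#include <cstdint>
#include <cstring>

// Copy one packet payload into the frame buffer at the given position,
// optionally prefixing the 4-byte Annex-B start code (00 00 00 01).
// Returns the number of bytes written so the caller can advance its offset.
size_t InsertPayload(uint8_t* frame_buffer, const uint8_t* payload,
                     size_t payload_size, bool insert_start_code) {
  static const uint8_t kStartCode[] = {0, 0, 0, 1};
  size_t written = 0;
  if (insert_start_code) {
    std::memcpy(frame_buffer, kStartCode, sizeof(kStartCode));
    written += sizeof(kStartCode);
  }
  std::memcpy(frame_buffer + written, payload, payload_size);
  return written + payload_size;
}
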
video_coding.gypi
35 'frame_buffer.h',
62 'frame_buffer.cc',
/external/libgav1/libgav1/src/gav1/
frame_buffer.h
92 int bottom_border, int stride_alignment, Libgav1FrameBuffer* frame_buffer);
141 Libgav1FrameBuffer* frame_buffer);
169 FrameBuffer* frame_buffer) { in SetFrameBuffer() argument
171 buffer_private_data, frame_buffer); in SetFrameBuffer()
/external/webrtc/talk/app/webrtc/java/jni/
androidmediadecoder_jni.cc
625 rtc::scoped_refptr<webrtc::VideoFrameBuffer> frame_buffer; in DeliverPendingOutputs() local
652 frame_buffer = surface_texture_helper_->CreateTextureFrame( in DeliverPendingOutputs()
689 frame_buffer = decoded_frame_pool_.CreateBuffer(width, height); in DeliverPendingOutputs()
701 frame_buffer->MutableData(webrtc::kYPlane), in DeliverPendingOutputs()
702 frame_buffer->stride(webrtc::kYPlane), in DeliverPendingOutputs()
703 frame_buffer->MutableData(webrtc::kUPlane), in DeliverPendingOutputs()
704 frame_buffer->stride(webrtc::kUPlane), in DeliverPendingOutputs()
705 frame_buffer->MutableData(webrtc::kVPlane), in DeliverPendingOutputs()
706 frame_buffer->stride(webrtc::kVPlane), in DeliverPendingOutputs()
715 frame_buffer->MutableData(webrtc::kYPlane), in DeliverPendingOutputs()
[all …]
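
In the Android decoder JNI path above, the decoded image is copied into an I420 VideoFrameBuffer through per-plane MutableData()/stride() accessors. The sketch below performs the same kind of stride-aware per-plane copy for an NV12 source without any WebRTC or libyuv types; the destination pointers and strides stand in for what frame_buffer->MutableData() and frame_buffer->stride() would return.

#include <cstdint>

// Copy an NV12 image (Y plane + interleaved UV plane) into separate
// I420 Y/U/V destination planes, honoring every stride involved.
void Nv12ToI420(const uint8_t* src_y, int src_stride_y,
                const uint8_t* src_uv, int src_stride_uv,
                uint8_t* dst_y, int dst_stride_y,
                uint8_t* dst_u, int dst_stride_u,
                uint8_t* dst_v, int dst_stride_v,
                int width, int height) {
  for (int row = 0; row < height; ++row) {
    for (int col = 0; col < width; ++col) {
      dst_y[row * dst_stride_y + col] = src_y[row * src_stride_y + col];
    }
  }
  const int chroma_width = (width + 1) / 2;
  const int chroma_height = (height + 1) / 2;
  for (int row = 0; row < chroma_height; ++row) {
    for (int col = 0; col < chroma_width; ++col) {
      dst_u[row * dst_stride_u + col] = src_uv[row * src_stride_uv + 2 * col];
      dst_v[row * dst_stride_v + col] =
          src_uv[row * src_stride_uv + 2 * col + 1];
    }
  }
}
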
/external/webrtc/webrtc/test/testsupport/
frame_writer.cc
53 bool FrameWriterImpl::WriteFrame(uint8_t* frame_buffer) { in WriteFrame() argument
54 assert(frame_buffer); in WriteFrame()
59 size_t bytes_written = fwrite(frame_buffer, 1, frame_length_in_bytes_, in WriteFrame()
frame_writer.h
35 virtual bool WriteFrame(uint8_t* frame_buffer) = 0;
57 bool WriteFrame(uint8_t* frame_buffer) override;
/external/webrtc/talk/media/base/
yuvframegenerator.cc
78 void YuvFrameGenerator::GenerateNextFrame(uint8_t* frame_buffer, in GenerateNextFrame() argument
98 memcpy(frame_buffer, y_data_, size); in GenerateNextFrame()
99 frame_buffer += size; in GenerateNextFrame()
100 memcpy(frame_buffer, u_data_, qsize); in GenerateNextFrame()
101 frame_buffer += qsize; in GenerateNextFrame()
102 memcpy(frame_buffer, v_data_, qsize); in GenerateNextFrame()
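
The frame generator above packs an I420 frame into one contiguous frame_buffer: the full-resolution Y plane first, then the quarter-size U and V planes. A hedged sketch of that packing, assuming even dimensions:

#include <cstddef>
#include <cstdint>
#include <cstring>

// Pack separate Y/U/V planes into one contiguous I420 frame buffer.
// For a width x height frame: Y is width*height bytes and U/V are each a
// quarter of that, so the caller must provide size + 2*(size/4) bytes.
void PackI420(uint8_t* frame_buffer, const uint8_t* y_data,
              const uint8_t* u_data, const uint8_t* v_data,
              int width, int height) {
  const size_t size = static_cast<size_t>(width) * height;
  const size_t qsize = size / 4;  // assumes even width and height
  std::memcpy(frame_buffer, y_data, size);
  frame_buffer += size;
  std::memcpy(frame_buffer, u_data, qsize);
  frame_buffer += qsize;
  std::memcpy(frame_buffer, v_data, qsize);
}
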
/external/u-boot/drivers/video/
tegra.c
33 fdt_addr_t frame_buffer; /* Address of frame buffer */ member
230 win->phys_addr = priv->frame_buffer; in setup_window()
269 priv->frame_buffer = (u32)default_lcd_base; in tegra_display_probe()
321 mmu_set_region_dcache_behaviour(priv->frame_buffer, plat->size, in tegra_lcd_probe()
330 debug("LCD frame buffer at %pa, size %x\n", &priv->frame_buffer, in tegra_lcd_probe()
/external/libaom/libaom/av1/common/
thread_common.c
140 YV12_BUFFER_CONFIG *frame_buffer, in loop_filter_data_reset() argument
143 lf_data->frame_buffer = frame_buffer; in loop_filter_data_reset()
267 const YV12_BUFFER_CONFIG *const frame_buffer, AV1_COMMON *const cm, in thread_loop_filter_rows() argument
290 av1_setup_dst_planes(planes, cm->seq_params.sb_size, frame_buffer, in thread_loop_filter_rows()
310 av1_setup_dst_planes(planes, cm->seq_params.sb_size, frame_buffer, in thread_loop_filter_rows()
326 thread_loop_filter_rows(lf_data->frame_buffer, lf_data->cm, lf_data->planes, in loop_filter_row_worker()
333 const YV12_BUFFER_CONFIG *const frame_buffer, AV1_COMMON *const cm, in thread_loop_filter_bitmask_rows() argument
357 av1_setup_dst_planes(planes, BLOCK_64X64, frame_buffer, mi_row, in thread_loop_filter_bitmask_rows()
377 av1_setup_dst_planes(planes, BLOCK_64X64, frame_buffer, mi_row, in thread_loop_filter_bitmask_rows()
393 thread_loop_filter_bitmask_rows(lf_data->frame_buffer, lf_data->cm, in loop_filter_bitmask_row_worker()
av1_loopfilter.c
659 static void loop_filter_rows(YV12_BUFFER_CONFIG *frame_buffer, AV1_COMMON *cm, in loop_filter_rows() argument
682 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, 0, 0, in loop_filter_rows()
691 av1_setup_dst_planes(pd, BLOCK_64X64, frame_buffer, mi_row, mi_col, in loop_filter_rows()
696 av1_setup_dst_planes(pd, BLOCK_64X64, frame_buffer, mi_row, in loop_filter_rows()
702 av1_setup_dst_planes(pd, BLOCK_64X64, frame_buffer, mi_row, in loop_filter_rows()
725 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row, in loop_filter_rows()
731 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, in loop_filter_rows()
739 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row, in loop_filter_rows()
748 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row, in loop_filter_rows()
758 av1_setup_dst_planes(pd, cm->seq_params.sb_size, frame_buffer, mi_row, in loop_filter_rows()
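
The libaom matches hand a shared frame_buffer (the reconstructed frame) to several loop-filter workers, each of which processes an interleaved subset of rows. The real code drives this through AV1's worker interface and coordinates across row boundaries; the sketch below only illustrates the row-partitioning idea with std::thread, and every name in it is illustrative rather than libaom's.

#include <cstdint>
#include <functional>
#include <thread>
#include <vector>

// Split [0, num_rows) across workers in an interleaved fashion, the way the
// row-based loop-filter workers do: worker i handles rows i, i+N, i+2N, ...
// Cross-row synchronization, which the real filter needs, is omitted here.
void FilterRowsInParallel(
    uint8_t* frame_buffer, int num_rows, int num_workers,
    const std::function<void(uint8_t*, int)>& filter_row) {
  std::vector<std::thread> workers;
  workers.reserve(num_workers);
  for (int worker = 0; worker < num_workers; ++worker) {
    workers.emplace_back([=, &filter_row]() {
      for (int row = worker; row < num_rows; row += num_workers) {
        filter_row(frame_buffer, row);
      }
    });
  }
  for (auto& t : workers) t.join();
}
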
/external/webrtc/webrtc/common_video/libyuv/
scaler_unittest.cc
360 rtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[src_required_size]); in ScaleSequence() local
366 if (fread(frame_buffer.get(), 1, src_required_size, source_file) != in ScaleSequence()
370 input_frame.CreateFrame(frame_buffer.get(), in ScaleSequence()
371 frame_buffer.get() + size_y, in ScaleSequence()
372 frame_buffer.get() + size_y + size_uv, in ScaleSequence()
/external/libvpx/libvpx/vp9/common/
vp9_loopfilter.h
141 YV12_BUFFER_CONFIG *frame_buffer; member
151 LFWorkerData *lf_data, YV12_BUFFER_CONFIG *frame_buffer,
/external/webrtc/webrtc/modules/video_coding/codecs/vp8/
vp8_sequence_coder.cc
150 rtc::scoped_ptr<uint8_t[]> frame_buffer(new uint8_t[length]); in SequenceCoder() local
166 if (fread(frame_buffer.get(), 1, length, input_file) != length) in SequenceCoder()
169 webrtc::ConvertToI420(webrtc::kI420, frame_buffer.get(), 0, 0, width, in SequenceCoder()
/external/webrtc/webrtc/test/testsupport/mock/
mock_frame_writer.h
24 MOCK_METHOD1(WriteFrame, bool(uint8_t* frame_buffer));
