
Searched refs: frame_buffer (Results 1 – 25 of 73), sorted by relevance


/external/tflite-support/tensorflow_lite_support/cc/task/vision/core/
base_vision_task_api.h
115 const FrameBuffer& frame_buffer, in Preprocess() argument
147 if (IsImagePreprocessingNeeded(frame_buffer, roi)) { in Preprocess()
167 frame_buffer, roi, preprocessed_frame_buffer.get())); in Preprocess()
171 input_data = frame_buffer.plane(0).buffer; in Preprocess()
172 input_data_byte_size = frame_buffer.plane(0).stride.row_stride_bytes * in Preprocess()
173 frame_buffer.dimension().height; in Preprocess()
245 bool IsImagePreprocessingNeeded(const FrameBuffer& frame_buffer, in IsImagePreprocessingNeeded() argument
249 roi.width() != frame_buffer.dimension().width || in IsImagePreprocessingNeeded()
250 roi.height() != frame_buffer.dimension().height) { in IsImagePreprocessingNeeded()
255 if (frame_buffer.orientation() != FrameBuffer::Orientation::kTopLeft || in IsImagePreprocessingNeeded()
[all …]
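
The hits above are fragments of one decision: BaseVisionTaskApi only runs image preprocessing when the region of interest does not cover the whole frame or the buffer is not already in kTopLeft orientation; otherwise it feeds the first plane of the buffer to the interpreter directly. A minimal self-contained sketch of that predicate, with simplified stand-ins for the tflite-support FrameBuffer and BoundingBox types:

  // Simplified stand-ins; the real types live in tensorflow_lite_support.
  struct Dimension { int width; int height; };
  enum class Orientation { kTopLeft, kOther };
  struct FrameBuffer { Dimension dimension; Orientation orientation; };
  struct BoundingBox { int origin_x; int origin_y; int width; int height; };

  bool IsImagePreprocessingNeeded(const FrameBuffer& frame_buffer,
                                  const BoundingBox& roi) {
    // Any ROI that is not exactly the full frame forces a crop.
    if (roi.origin_x != 0 || roi.origin_y != 0 ||
        roi.width != frame_buffer.dimension.width ||
        roi.height != frame_buffer.dimension.height) {
      return true;
    }
    // Any orientation other than kTopLeft forces a rotation/flip.
    return frame_buffer.orientation != Orientation::kTopLeft;
  }
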
BUILD
16 ":frame_buffer",
35 name = "frame_buffer",
36 srcs = ["frame_buffer.cc"],
37 hdrs = ["frame_buffer.h"],
/external/libgav1/libgav1/src/
yuv_buffer.cc
74 FrameBuffer frame_buffer; in Realloc() local
78 &frame_buffer) != kStatusOk) { in Realloc()
82 if (frame_buffer.plane[0] == nullptr || in Realloc()
83 (!is_monochrome && frame_buffer.plane[1] == nullptr) || in Realloc()
84 (!is_monochrome && frame_buffer.plane[2] == nullptr)) { in Realloc()
90 stride_[kPlaneY] = frame_buffer.stride[0]; in Realloc()
91 stride_[kPlaneU] = frame_buffer.stride[1]; in Realloc()
92 stride_[kPlaneV] = frame_buffer.stride[2]; in Realloc()
93 buffer_[kPlaneY] = frame_buffer.plane[0]; in Realloc()
94 buffer_[kPlaneU] = frame_buffer.plane[1]; in Realloc()
[all …]
frame_buffer.cc
128 Libgav1FrameBuffer* frame_buffer) { in Libgav1SetFrameBuffer() argument
132 frame_buffer == nullptr) { in Libgav1SetFrameBuffer()
139 frame_buffer->plane[0] = libgav1::AlignAddr(y_buffer + info->y_plane_offset, in Libgav1SetFrameBuffer()
141 frame_buffer->plane[1] = libgav1::AlignAddr(u_buffer + info->uv_plane_offset, in Libgav1SetFrameBuffer()
143 frame_buffer->plane[2] = libgav1::AlignAddr(v_buffer + info->uv_plane_offset, in Libgav1SetFrameBuffer()
145 frame_buffer->stride[0] = info->y_stride; in Libgav1SetFrameBuffer()
146 frame_buffer->stride[1] = frame_buffer->stride[2] = info->uv_stride; in Libgav1SetFrameBuffer()
147 frame_buffer->private_data = buffer_private_data; in Libgav1SetFrameBuffer()
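
Each plane pointer above is passed through libgav1::AlignAddr so the decoder receives stride-aligned memory. A hedged sketch of what such an alignment helper does, assuming a power-of-two alignment (the real helper lives in libgav1's utility headers and may differ in signature):

  #include <cstdint>

  // Round a pointer up to the next multiple of `alignment` (a power of
  // two), as done for frame_buffer->plane[0..2] above.
  inline uint8_t* AlignAddr(uint8_t* addr, uintptr_t alignment) {
    const uintptr_t value = reinterpret_cast<uintptr_t>(addr);
    return reinterpret_cast<uint8_t*>((value + alignment - 1) &
                                      ~(alignment - 1));
  }
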
internal_frame_buffer_list.cc
42 int bottom_border, int stride_alignment, Libgav1FrameBuffer* frame_buffer) { in GetInternalFrameBuffer() argument
47 top_border, bottom_border, stride_alignment, frame_buffer); in GetInternalFrameBuffer()
69 int stride_alignment, Libgav1FrameBuffer* frame_buffer) { in GetFrameBuffer() argument
111 frame_buffer); in GetFrameBuffer()
/external/libgav1/libgav1/examples/
gav1_decode_cv_pixel_buffer_pool.cc
56 libgav1::FrameBuffer* frame_buffer) { in Gav1DecodeGetCVPixelBuffer() argument
61 top_border, bottom_border, stride_alignment, frame_buffer); in Gav1DecodeGetCVPixelBuffer()
192 libgav1::FrameBuffer* frame_buffer) { in GetCVPixelBuffer() argument
243 frame_buffer->stride[0] = in GetCVPixelBuffer()
245 frame_buffer->plane[0] = static_cast<uint8_t*>( in GetCVPixelBuffer()
248 frame_buffer->stride[1] = 0; in GetCVPixelBuffer()
249 frame_buffer->stride[2] = 0; in GetCVPixelBuffer()
250 frame_buffer->plane[1] = nullptr; in GetCVPixelBuffer()
251 frame_buffer->plane[2] = nullptr; in GetCVPixelBuffer()
253 frame_buffer->stride[1] = in GetCVPixelBuffer()
[all …]
file_writer.cc
123 bool FileWriter::WriteFrame(const DecoderBuffer& frame_buffer) { in WriteFrame() argument
134 (frame_buffer.bitdepth == 8) ? sizeof(uint8_t) : sizeof(uint16_t); in WriteFrame()
135 for (int plane_index = 0; plane_index < frame_buffer.NumPlanes(); in WriteFrame()
137 const int height = frame_buffer.displayed_height[plane_index]; in WriteFrame()
138 const int width = frame_buffer.displayed_width[plane_index]; in WriteFrame()
139 const int stride = frame_buffer.stride[plane_index]; in WriteFrame()
140 const uint8_t* const plane_pointer = frame_buffer.plane[plane_index]; in WriteFrame()
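
WriteFrame iterates the planes of a DecoderBuffer and has to account for the row stride being larger than the displayed width. A hedged sketch of the inner write loop (plain FILE* output; the function and parameter names are illustrative, not libgav1's):

  #include <cstdint>
  #include <cstdio>

  // Dump one plane, skipping the stride padding at the end of each row.
  // pixel_size is sizeof(uint8_t) for 8-bit content and sizeof(uint16_t)
  // for 10/12-bit content, matching the bitdepth check above.
  void WritePlane(FILE* file, const uint8_t* plane, int width, int height,
                  int stride, size_t pixel_size) {
    for (int row = 0; row < height; ++row) {
      fwrite(plane, pixel_size, static_cast<size_t>(width), file);
      plane += stride;  // stride is in bytes
    }
  }
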
/external/deqp-deps/amber/src/
verifier_test.cc
70 const uint8_t frame_buffer[3][3][4] = { in TEST_F() local
90 static_cast<const void*>(frame_buffer)); in TEST_F()
111 uint8_t frame_buffer[10][10][4] = {}; in TEST_F() local
114 frame_buffer[y][x][0] = 128; in TEST_F()
115 frame_buffer[y][x][1] = 64; in TEST_F()
116 frame_buffer[y][x][2] = 51; in TEST_F()
117 frame_buffer[y][x][3] = 204; in TEST_F()
123 static_cast<const void*>(frame_buffer)); in TEST_F()
144 uint8_t frame_buffer[250][250][4] = {}; in TEST_F() local
148 static_cast<const void*>(frame_buffer)); in TEST_F()
[all …]
/external/webrtc/modules/video_coding/codecs/h264/
h264_decoder_impl.cc
105 rtc::scoped_refptr<I420Buffer> frame_buffer = in AVGetBuffer2() local
109 int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); in AVGetBuffer2()
111 RTC_DCHECK_EQ(frame_buffer->DataU(), frame_buffer->DataY() + y_size); in AVGetBuffer2()
112 RTC_DCHECK_EQ(frame_buffer->DataV(), frame_buffer->DataU() + uv_size); in AVGetBuffer2()
119 av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY(); in AVGetBuffer2()
120 av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY(); in AVGetBuffer2()
121 av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU(); in AVGetBuffer2()
122 av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU(); in AVGetBuffer2()
123 av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV(); in AVGetBuffer2()
124 av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV(); in AVGetBuffer2()
[all …]
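
The RTC_DCHECK_EQ lines verify that the I420 buffer's U plane starts right where Y ends and V right where U ends, i.e. the three planes form one contiguous allocation; FFmpeg's AVFrame is then pointed straight at those planes so the decoder writes into the WebRTC buffer with no copy. A sketch of that contiguity invariant, restated as a standalone check with illustrative parameter names:

  #include <cassert>
  #include <cstdint>

  // The zero-copy hand-off to FFmpeg is only valid if the I420 planes are
  // contiguous: U starts where Y ends, V starts where U ends.
  void CheckI420Contiguity(const uint8_t* data_y, const uint8_t* data_u,
                           const uint8_t* data_v, int y_size, int uv_size) {
    assert(data_u == data_y + y_size);
    assert(data_v == data_u + uv_size);
  }
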
/external/webrtc/sdk/objc/native/src/
objc_frame_buffer.mm
23 explicit ObjCI420FrameBuffer(id<RTC_OBJC_TYPE(RTCI420Buffer)> frame_buffer)
24 : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
51 ObjCFrameBuffer::ObjCFrameBuffer(id<RTC_OBJC_TYPE(RTCVideoFrameBuffer)> frame_buffer)
52 : frame_buffer_(frame_buffer), width_(frame_buffer.width), height_(frame_buffer.height) {}
/external/webrtc/test/testsupport/
video_frame_writer.cc
75 rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); in WriteFrame() local
76 RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength()); in WriteFrame()
77 return frame_writer_->WriteFrame(frame_buffer.data()); in WriteFrame()
101 rtc::Buffer frame_buffer = ExtractI420BufferWithSize(frame, width_, height_); in WriteFrame() local
102 RTC_CHECK_EQ(frame_buffer.size(), frame_writer_->FrameLength()); in WriteFrame()
103 return frame_writer_->WriteFrame(frame_buffer.data()); in WriteFrame()
yuv_frame_writer.cc
53 bool YuvFrameWriterImpl::WriteFrame(uint8_t* frame_buffer) { in WriteFrame() argument
54 RTC_DCHECK(frame_buffer); in WriteFrame()
61 fwrite(frame_buffer, 1, frame_length_in_bytes_, output_file_); in WriteFrame()
frame_writer.h
35 virtual bool WriteFrame(uint8_t* frame_buffer) = 0;
57 bool WriteFrame(uint8_t* frame_buffer) override;
79 bool WriteFrame(uint8_t* frame_buffer) override;
/external/tensorflow/tensorflow/lite/experimental/microfrontend/
audio_microfrontend.cc
136 std::vector<std::vector<T>> frame_buffer(num_frames); in GenerateFeatures() local
147 frame_buffer[frame_index].reserve(output.size); in GenerateFeatures()
150 frame_buffer[frame_index].push_back(static_cast<T>(output.values[i]) / in GenerateFeatures()
160 for (anchor = 0; anchor < frame_buffer.size(); anchor += data->frame_stride) { in GenerateFeatures()
165 if (data->zero_padding && (frame < 0 || frame >= frame_buffer.size())) { in GenerateFeatures()
168 feature = &frame_buffer[0]; in GenerateFeatures()
169 } else if (frame >= frame_buffer.size()) { in GenerateFeatures()
170 feature = &frame_buffer[frame_buffer.size() - 1]; in GenerateFeatures()
172 feature = &frame_buffer[frame]; in GenerateFeatures()
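
Both microfrontend hits (this file and the op version under ops/ below) implement the same edge policy when stacking neighboring frames around each anchor: with zero padding enabled, out-of-range frames contribute zeros; without it, they clamp to the first or last frame. A simplified sketch of that selection, using float frames and a plain function in place of the TFLite kernel plumbing:

  #include <cstddef>
  #include <vector>

  // Pick the feature frame for position `frame`, mirroring the edge
  // handling in GenerateFeatures()/Compute(): zero-pad out-of-range frames
  // when requested, otherwise clamp to the nearest valid frame.
  const std::vector<float>* SelectFrame(
      const std::vector<std::vector<float>>& frame_buffer,
      const std::vector<float>& zeros, long frame, bool zero_padding) {
    const long size = static_cast<long>(frame_buffer.size());
    if (zero_padding && (frame < 0 || frame >= size)) return &zeros;
    if (frame < 0) return &frame_buffer[0];
    if (frame >= size) return &frame_buffer[size - 1];
    return &frame_buffer[frame];
  }
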
/external/tensorflow/tensorflow/lite/experimental/microfrontend/ops/
audio_microfrontend_op.cc
242 std::vector<std::vector<T>> frame_buffer(num_frames); in Compute() local
252 frame_buffer[frame_index].reserve(output.size); in Compute()
255 frame_buffer[frame_index].push_back(static_cast<T>(output.values[i]) / in Compute()
266 for (anchor = 0; anchor < frame_buffer.size(); anchor += frame_stride_) { in Compute()
271 if (zero_padding_ && (frame < 0 || frame >= frame_buffer.size())) { in Compute()
274 feature = &frame_buffer[0]; in Compute()
275 } else if (frame >= frame_buffer.size()) { in Compute()
276 feature = &frame_buffer[frame_buffer.size() - 1]; in Compute()
278 feature = &frame_buffer[frame]; in Compute()
/external/libgav1/libgav1/src/gav1/
frame_buffer.h
92 int bottom_border, int stride_alignment, Libgav1FrameBuffer* frame_buffer);
141 Libgav1FrameBuffer* frame_buffer);
169 FrameBuffer* frame_buffer) { in SetFrameBuffer() argument
171 buffer_private_data, frame_buffer); in SetFrameBuffer()
/external/webrtc/test/fuzzers/
frame_buffer2_fuzzer.cc
72 video_coding::FrameBuffer frame_buffer(time_controller.GetClock(), &timing, in FuzzOneInput() local
89 frame_buffer.InsertFrame(std::move(frame)); in FuzzOneInput()
95 task_queue.PostTask([&task_queue, &frame_buffer, in FuzzOneInput()
98 frame_buffer.NextFrame( in FuzzOneInput()
/external/tflite-support/tensorflow_lite_support/examples/task/vision/desktop/
image_classifier_demo.cc
120 std::unique_ptr<FrameBuffer> frame_buffer; in Classify() local
122 frame_buffer = in Classify()
125 frame_buffer = in Classify()
135 image_classifier->Classify(*frame_buffer)); in Classify()
image_segmenter_demo.cc
143 std::unique_ptr<FrameBuffer> frame_buffer; in Segment() local
145 frame_buffer = in Segment()
148 frame_buffer = in Segment()
158 image_segmenter->Segment(*frame_buffer)); in Segment()
/external/webrtc/modules/video_coding/
Dsession_info.h42 uint8_t* frame_buffer,
93 size_t InsertBuffer(uint8_t* frame_buffer, PacketIterator packetIterator);
97 uint8_t* frame_buffer);
session_info.cc
188 size_t VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer, in InsertBuffer() argument
201 packet.dataPtr = frame_buffer + offset; in InsertBuffer()
222 uint8_t* frame_buffer_ptr = frame_buffer + offset; in InsertBuffer()
246 uint8_t* frame_buffer) { in Insert() argument
249 memcpy(frame_buffer, startCode, kH264StartCodeLengthBytes); in Insert()
251 memcpy(frame_buffer + (insert_start_code ? kH264StartCodeLengthBytes : 0), in Insert()
421 uint8_t* frame_buffer, in InsertPacket() argument
498 size_t returnLength = InsertBuffer(frame_buffer, packet_list_it); in InsertPacket()
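
The Insert() hit shows how an H.264 NAL unit is written into the session's frame buffer: an Annex B start code is prepended when needed, then the payload is copied after it. A minimal sketch of that step (kH264StartCodeLengthBytes is 4 in WebRTC; the payload pointer/length parameters are illustrative):

  #include <cstdint>
  #include <cstring>

  // Copy an optional Annex B start code followed by the NAL payload into
  // frame_buffer, returning the number of bytes written.
  size_t InsertNalu(uint8_t* frame_buffer, const uint8_t* payload,
                    size_t payload_size, bool insert_start_code) {
    static const uint8_t kStartCode[] = {0, 0, 0, 1};
    size_t offset = 0;
    if (insert_start_code) {
      memcpy(frame_buffer, kStartCode, sizeof(kStartCode));
      offset = sizeof(kStartCode);
    }
    memcpy(frame_buffer + offset, payload, payload_size);
    return offset + payload_size;
  }
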
/external/tflite-support/tensorflow_lite_support/cc/task/vision/
image_segmenter.cc
307 const FrameBuffer& frame_buffer) { in Segment() argument
309 roi.set_width(frame_buffer.dimension().width); in Segment()
310 roi.set_height(frame_buffer.dimension().height); in Segment()
311 return InferWithFallback(frame_buffer, roi); in Segment()
316 const FrameBuffer& frame_buffer, const BoundingBox& /*roi*/) { in Postprocess() argument
332 FrameBuffer::Orientation tensor_orientation = frame_buffer.orientation(); in Postprocess()
343 if (RequireDimensionSwap(frame_buffer.orientation(), in Postprocess()
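
Segment() here, and Detect() in object_detector.cc below, share the same entry pattern: build a region of interest covering the full frame, then delegate to the shared inference path. Restated as one contiguous fragment for readability (accessors as in the hits above):

  // Default ROI: the whole frame. Same pattern in ImageSegmenter::Segment()
  // and ObjectDetector::Detect(); origin_x/origin_y default to 0.
  BoundingBox roi;
  roi.set_width(frame_buffer.dimension().width);
  roi.set_height(frame_buffer.dimension().height);
  return InferWithFallback(frame_buffer, roi);
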
image_classifier.h
97 const FrameBuffer& frame_buffer);
111 const FrameBuffer& frame_buffer, const BoundingBox& roi);
125 const FrameBuffer& frame_buffer, const BoundingBox& roi) override;
object_detector.cc
437 const FrameBuffer& frame_buffer) { in Detect() argument
439 roi.set_width(frame_buffer.dimension().width); in Detect()
440 roi.set_height(frame_buffer.dimension().height); in Detect()
443 return Infer(frame_buffer, roi); in Detect()
448 const FrameBuffer& frame_buffer, const BoundingBox& /*roi*/) { in Postprocess() argument
464 frame_buffer.dimension(); in Postprocess()
465 if (RequireDimensionSwap(frame_buffer.orientation(), in Postprocess()
490 /*from_orientation=*/frame_buffer.orientation(), in Postprocess()
/external/tflite-support/tensorflow_lite_support/cc/task/vision/proto/
detections.proto
29 // frame_buffer.width) x [0, frame_buffer.height)`, which are the dimensions
30 // of the underlying `frame_buffer` data before any `Orientation` flag gets
39 // For example, if the input `frame_buffer` has its `Orientation` flag set to
