1 /* GStreamer video frame cropping
2 * Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
3 *
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Library General Public
6 * License as published by the Free Software Foundation; either
7 * version 2 of the License, or (at your option) any later version.
8 *
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Library General Public License for more details.
13 *
14 * You should have received a copy of the GNU Library General Public
15 * License along with this library; if not, write to the
16 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
17 * Boston, MA 02110-1301, USA.
18 */
19
20 /**
21 * SECTION:element-videocrop
22 * @title: videocrop
23 * @see_also: #GstVideoBox
24 *
25 * This element crops video frames, meaning it can remove parts of the
26 * picture on the left, right, top or bottom of the picture and output
27 * a smaller picture than the input picture, with the unwanted parts at the
28 * border removed.
29 *
30 * The videocrop element is similar to the videobox element, but its main
31 * goal is to support a multitude of formats as efficiently as possible.
 * Unlike videobox, it cannot add borders to the picture and unlike videobox
33 * it will always output images in exactly the same format as the input image.
34 *
35 * If there is nothing to crop, the element will operate in pass-through mode.
36 *
37 * Note that no special efforts are made to handle chroma-subsampled formats
38 * in the case of odd-valued cropping and compensate for sub-unit chroma plane
39 * shifts for such formats in the case where the #GstVideoCrop:left or
40 * #GstVideoCrop:top property is set to an odd number. This doesn't matter for
41 * most use cases, but it might matter for yours.
42 *
43 * ## Example launch line
44 * |[
45 * gst-launch-1.0 -v videotestsrc ! videocrop top=42 left=1 right=4 bottom=0 ! ximagesink
46 * ]|
47 *
48 */
49
50 /* TODO:
51 * - for packed formats, we could avoid memcpy() in case crop_left
52 * and crop_right are 0 and just create a sub-buffer of the input
53 * buffer
54 */
55
56 #ifdef HAVE_CONFIG_H
57 #include "config.h"
58 #endif
59
60 #include <gst/gst.h>
61 #include <gst/video/video.h>
62
63 #include "gstvideocrop.h"
64 #include "gstaspectratiocrop.h"
65 /* include private header which contains the supported formats */
66 #include "gstvideocrop-private.h"
67
68 #include <string.h>
69
70 GST_DEBUG_CATEGORY_STATIC (videocrop_debug);
71 #define GST_CAT_DEFAULT videocrop_debug
72
73 enum
74 {
75 PROP_0,
76 PROP_LEFT,
77 PROP_RIGHT,
78 PROP_TOP,
79 PROP_BOTTOM
80 };
81
82 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
83 GST_PAD_SRC,
84 GST_PAD_ALWAYS,
85 GST_STATIC_CAPS (VIDEO_CROP_CAPS)
86 );
87
88 static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
89 GST_PAD_SINK,
90 GST_PAD_ALWAYS,
91 GST_STATIC_CAPS (VIDEO_CROP_CAPS)
92 );
93
94 #define gst_video_crop_parent_class parent_class
95 G_DEFINE_TYPE (GstVideoCrop, gst_video_crop, GST_TYPE_VIDEO_FILTER);
96 GST_ELEMENT_REGISTER_DEFINE (videocrop, "videocrop", GST_RANK_NONE,
97 GST_TYPE_VIDEO_CROP);
98
99 static void gst_video_crop_set_property (GObject * object, guint prop_id,
100 const GValue * value, GParamSpec * pspec);
101 static void gst_video_crop_get_property (GObject * object, guint prop_id,
102 GValue * value, GParamSpec * pspec);
103
104 static void gst_video_crop_before_transform (GstBaseTransform * trans,
105 GstBuffer * in);
106 static GstCaps *gst_video_crop_transform_caps (GstBaseTransform * trans,
107 GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps);
108 static gboolean gst_video_crop_src_event (GstBaseTransform * trans,
109 GstEvent * event);
110
111 static gboolean gst_video_crop_set_info (GstVideoFilter * vfilter, GstCaps * in,
112 GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info);
113 static GstFlowReturn gst_video_crop_transform_frame (GstVideoFilter * vfilter,
114 GstVideoFrame * in_frame, GstVideoFrame * out_frame);
115
116 static gboolean gst_video_crop_decide_allocation (GstBaseTransform * trans,
117 GstQuery * query);
118 static gboolean gst_video_crop_propose_allocation (GstBaseTransform * trans,
119 GstQuery * decide_query, GstQuery * query);
120 static GstFlowReturn gst_video_crop_transform_ip (GstBaseTransform * trans,
121 GstBuffer * buf);
122
123 static gboolean
gst_video_crop_src_event(GstBaseTransform * trans,GstEvent * event)124 gst_video_crop_src_event (GstBaseTransform * trans, GstEvent * event)
125 {
126 GstEvent *new_event;
127 GstStructure *new_structure;
128 const GstStructure *structure;
129 const gchar *event_name;
130 double pointer_x;
131 double pointer_y;
132
133 GstVideoCrop *vcrop = GST_VIDEO_CROP (trans);
134 new_event = NULL;
135
136 GST_OBJECT_LOCK (vcrop);
137 if (GST_EVENT_TYPE (event) == GST_EVENT_NAVIGATION &&
138 (vcrop->crop_left != 0 || vcrop->crop_top != 0)) {
139 structure = gst_event_get_structure (event);
140 event_name = gst_structure_get_string (structure, "event");
141
142 if (event_name &&
143 (strcmp (event_name, "mouse-move") == 0 ||
144 strcmp (event_name, "mouse-button-press") == 0 ||
145 strcmp (event_name, "mouse-button-release") == 0)) {
146
147 if (gst_structure_get_double (structure, "pointer_x", &pointer_x) &&
148 gst_structure_get_double (structure, "pointer_y", &pointer_y)) {
149
150 new_structure = gst_structure_copy (structure);
151 gst_structure_set (new_structure,
152 "pointer_x", G_TYPE_DOUBLE, (double) (pointer_x + vcrop->crop_left),
153 "pointer_y", G_TYPE_DOUBLE, (double) (pointer_y + vcrop->crop_top),
154 NULL);
155
156 new_event = gst_event_new_navigation (new_structure);
157 gst_event_unref (event);
158 } else {
159 GST_WARNING_OBJECT (vcrop, "Failed to read navigation event");
160 }
161 }
162 }
163
164 GST_OBJECT_UNLOCK (vcrop);
165
166 return GST_BASE_TRANSFORM_CLASS (parent_class)->src_event (trans,
167 (new_event ? new_event : event));
168 }
169
170 static void
gst_video_crop_class_init(GstVideoCropClass * klass)171 gst_video_crop_class_init (GstVideoCropClass * klass)
172 {
173 GObjectClass *gobject_class;
174 GstElementClass *element_class;
175 GstBaseTransformClass *basetransform_class;
176 GstVideoFilterClass *vfilter_class;
177
178 gobject_class = (GObjectClass *) klass;
179 element_class = (GstElementClass *) klass;
180 basetransform_class = (GstBaseTransformClass *) klass;
181 vfilter_class = (GstVideoFilterClass *) klass;
182
183 gobject_class->set_property = gst_video_crop_set_property;
184 gobject_class->get_property = gst_video_crop_get_property;
185
186 g_object_class_install_property (gobject_class, PROP_LEFT,
187 g_param_spec_int ("left", "Left",
188 "Pixels to crop at left (-1 to auto-crop)", -1, G_MAXINT, 0,
189 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
190 GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
191 g_object_class_install_property (gobject_class, PROP_RIGHT,
192 g_param_spec_int ("right", "Right",
193 "Pixels to crop at right (-1 to auto-crop)", -1, G_MAXINT, 0,
194 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
195 GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
196 g_object_class_install_property (gobject_class, PROP_TOP,
197 g_param_spec_int ("top", "Top", "Pixels to crop at top (-1 to auto-crop)",
198 -1, G_MAXINT, 0,
199 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
200 GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
201 g_object_class_install_property (gobject_class, PROP_BOTTOM,
202 g_param_spec_int ("bottom", "Bottom",
203 "Pixels to crop at bottom (-1 to auto-crop)", -1, G_MAXINT, 0,
204 G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
205 GST_PARAM_MUTABLE_PLAYING | GST_PARAM_CONTROLLABLE));
206
207 gst_element_class_add_static_pad_template (element_class, &sink_template);
208 gst_element_class_add_static_pad_template (element_class, &src_template);
209 gst_element_class_set_static_metadata (element_class, "Crop",
210 "Filter/Effect/Video",
211 "Crops video into a user-defined region",
212 "Tim-Philipp Müller <tim centricular net>");
213
214 basetransform_class->before_transform =
215 GST_DEBUG_FUNCPTR (gst_video_crop_before_transform);
216 basetransform_class->transform_ip_on_passthrough = FALSE;
217 basetransform_class->transform_caps =
218 GST_DEBUG_FUNCPTR (gst_video_crop_transform_caps);
219 basetransform_class->src_event = GST_DEBUG_FUNCPTR (gst_video_crop_src_event);
220 basetransform_class->decide_allocation =
221 GST_DEBUG_FUNCPTR (gst_video_crop_decide_allocation);
222 basetransform_class->propose_allocation =
223 GST_DEBUG_FUNCPTR (gst_video_crop_propose_allocation);
224 basetransform_class->transform_ip =
225 GST_DEBUG_FUNCPTR (gst_video_crop_transform_ip);
226
227 vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_crop_set_info);
228 vfilter_class->transform_frame =
229 GST_DEBUG_FUNCPTR (gst_video_crop_transform_frame);
230 }
231
232 static void
gst_video_crop_init(GstVideoCrop * vcrop)233 gst_video_crop_init (GstVideoCrop * vcrop)
234 {
235 GST_DEBUG_CATEGORY_INIT (videocrop_debug, "videocrop", 0, "videocrop");
236
237 vcrop->crop_right = 0;
238 vcrop->crop_left = 0;
239 vcrop->crop_top = 0;
240 vcrop->crop_bottom = 0;
241 }
242
243 #define ROUND_DOWN_2(n) ((n)&(~1))
244
245 static void
gst_video_crop_transform_packed_complex(GstVideoCrop * vcrop,GstVideoFrame * in_frame,GstVideoFrame * out_frame,gint x,gint y)246 gst_video_crop_transform_packed_complex (GstVideoCrop * vcrop,
247 GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
248 {
249 guint8 *in_data, *out_data;
250 guint i, dx;
251 gint width, height;
252 gint in_stride;
253 gint out_stride;
254
255 width = GST_VIDEO_FRAME_WIDTH (out_frame);
256 height = GST_VIDEO_FRAME_HEIGHT (out_frame);
257
258 in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
259 out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
260
261 in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
262 out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
263
264 in_data += vcrop->crop_top * in_stride;
265
266 /* rounding down here so we end up at the start of a macro-pixel and not
267 * in the middle of one */
268 in_data += ROUND_DOWN_2 (vcrop->crop_left) *
269 GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
270
271 dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);
272
273 /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5]
274 * YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
275 if ((vcrop->crop_left % 2) != 0) {
276 for (i = 0; i < height; ++i) {
277 gint j;
278
279 memcpy (out_data, in_data, dx);
280
281 /* move just the Y samples one pixel to the left, don't worry about
282 * chroma shift */
283 for (j = vcrop->macro_y_off; j < out_stride - 2; j += 2)
284 out_data[j] = in_data[j + 2];
285
286 in_data += in_stride;
287 out_data += out_stride;
288 }
289 } else {
290 for (i = 0; i < height; ++i) {
291 memcpy (out_data, in_data, dx);
292 in_data += in_stride;
293 out_data += out_stride;
294 }
295 }
296 }
297
298 static void
gst_video_crop_transform_packed_simple(GstVideoCrop * vcrop,GstVideoFrame * in_frame,GstVideoFrame * out_frame,gint x,gint y)299 gst_video_crop_transform_packed_simple (GstVideoCrop * vcrop,
300 GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
301 {
302 guint8 *in_data, *out_data;
303 gint width, height;
304 guint i, dx;
305 gint in_stride, out_stride;
306
307 width = GST_VIDEO_FRAME_WIDTH (out_frame);
308 height = GST_VIDEO_FRAME_HEIGHT (out_frame);
309
310 in_data = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
311 out_data = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
312
313 in_stride = GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
314 out_stride = GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
315
316 in_data += (vcrop->crop_top + y) * in_stride;
317 in_data +=
318 (vcrop->crop_left + x) * GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, 0);
319
320 dx = width * GST_VIDEO_FRAME_COMP_PSTRIDE (out_frame, 0);
321
322 for (i = 0; i < height; ++i) {
323 memcpy (out_data, in_data, dx);
324 in_data += in_stride;
325 out_data += out_stride;
326 }
327 }
328
/* Copy a planar frame (I420/Y444/GBR/... including >8-bit variants),
 * cropping each plane independently.
 *
 * For each plane the top/left crop (including any upstream crop-meta
 * offset x/y) is first rounded down to that plane's chroma subsampling
 * factor, then scaled into plane coordinates before offsetting the input
 * pointer. Rows are copied with memcpy per plane line. */
static void
gst_video_crop_transform_planar (GstVideoCrop * vcrop,
    GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
{
  const GstVideoFormatInfo *format_info;
  gint crop_top, crop_left;
  guint p;

  format_info = in_frame->info.finfo;
  /* total effective crop: configured amount plus upstream crop meta */
  crop_left = vcrop->crop_left + x;
  crop_top = vcrop->crop_top + y;

  for (p = 0; p < GST_VIDEO_FRAME_N_PLANES (in_frame); ++p) {
    guint8 *plane_in, *plane_out;
    guint sub_w_factor, sub_h_factor;
    guint subsampled_crop_left, subsampled_crop_top;
    guint copy_width;
    gint i;
    gsize bytes_per_pixel;

    /* plane */
    plane_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, p);
    plane_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, p);

    /* To support > 8bit, we need to add a byte-multiplier that specifies
     * how many bytes are used per pixel value */
    bytes_per_pixel = GST_VIDEO_FRAME_COMP_PSTRIDE (in_frame, p);

    /* apply crop top/left
     * crop_top and crop_left have to be rounded down to the corresponding
     * subsampling factor, since, e.g.: the first line in a subsampled plane
     * describes 2 lines in the actual image. A crop_top of 1 thus should
     * not shift the pointer of the input plane. */
    sub_w_factor = 1 << GST_VIDEO_FORMAT_INFO_W_SUB (format_info, p);
    sub_h_factor = 1 << GST_VIDEO_FORMAT_INFO_H_SUB (format_info, p);
    subsampled_crop_left = GST_ROUND_DOWN_N ((guint) crop_left, sub_w_factor);
    subsampled_crop_top = GST_ROUND_DOWN_N ((guint) crop_top, sub_h_factor);

    /* convert the rounded image-space crop into plane-space offsets */
    plane_in +=
        GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (format_info, p,
        subsampled_crop_top) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, p);
    plane_in +=
        GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (format_info, p,
        subsampled_crop_left) * bytes_per_pixel;
    copy_width = GST_VIDEO_FRAME_COMP_WIDTH (out_frame, p) * bytes_per_pixel;

    /* copy the visible part of every plane line */
    for (i = 0; i < GST_VIDEO_FRAME_COMP_HEIGHT (out_frame, p); ++i) {
      memcpy (plane_out, plane_in, copy_width);
      plane_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, p);
      plane_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, p);
    }
  }
}
383
384 static void
gst_video_crop_transform_semi_planar(GstVideoCrop * vcrop,GstVideoFrame * in_frame,GstVideoFrame * out_frame,gint x,gint y)385 gst_video_crop_transform_semi_planar (GstVideoCrop * vcrop,
386 GstVideoFrame * in_frame, GstVideoFrame * out_frame, gint x, gint y)
387 {
388 gint width, height;
389 gint crop_top, crop_left;
390 guint8 *y_out, *uv_out;
391 guint8 *y_in, *uv_in;
392 guint i, dx;
393
394 width = GST_VIDEO_FRAME_WIDTH (out_frame);
395 height = GST_VIDEO_FRAME_HEIGHT (out_frame);
396 crop_left = vcrop->crop_left + x;
397 crop_top = vcrop->crop_top + y;
398
399 /* Y plane */
400 y_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
401 y_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);
402
403 /* UV plane */
404 uv_in = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 1);
405 uv_out = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 1);
406
407 y_in += crop_top * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0) + crop_left;
408 dx = width;
409
410 for (i = 0; i < height; ++i) {
411 memcpy (y_out, y_in, dx);
412 y_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 0);
413 y_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 0);
414 }
415
416 uv_in += (crop_top / 2) * GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
417 uv_in += GST_ROUND_DOWN_2 (crop_left);
418 dx = GST_ROUND_UP_2 (width);
419
420 for (i = 0; i < GST_ROUND_UP_2 (height) / 2; i++) {
421 memcpy (uv_out, uv_in, dx);
422 uv_in += GST_VIDEO_FRAME_PLANE_STRIDE (in_frame, 1);
423 uv_out += GST_VIDEO_FRAME_PLANE_STRIDE (out_frame, 1);
424 }
425 }
426
427 static GstFlowReturn
gst_video_crop_transform_frame(GstVideoFilter * vfilter,GstVideoFrame * in_frame,GstVideoFrame * out_frame)428 gst_video_crop_transform_frame (GstVideoFilter * vfilter,
429 GstVideoFrame * in_frame, GstVideoFrame * out_frame)
430 {
431 GstVideoCrop *vcrop = GST_VIDEO_CROP (vfilter);
432 GstVideoCropMeta *meta = gst_buffer_get_video_crop_meta (in_frame->buffer);
433 gint x = 0, y = 0;
434
435 if (G_UNLIKELY (vcrop->need_update)) {
436 if (!gst_video_crop_set_info (vfilter, NULL, &vcrop->in_info, NULL,
437 &vcrop->out_info)) {
438 return GST_FLOW_ERROR;
439 }
440 }
441
442 if (meta) {
443 x = meta->x;
444 y = meta->y;
445 }
446
447 switch (vcrop->packing) {
448 case VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE:
449 gst_video_crop_transform_packed_simple (vcrop, in_frame, out_frame, x, y);
450 break;
451 case VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX:
452 gst_video_crop_transform_packed_complex (vcrop, in_frame, out_frame, x,
453 y);
454 break;
455 case VIDEO_CROP_PIXEL_FORMAT_PLANAR:
456 gst_video_crop_transform_planar (vcrop, in_frame, out_frame, x, y);
457 break;
458 case VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR:
459 gst_video_crop_transform_semi_planar (vcrop, in_frame, out_frame, x, y);
460 break;
461 default:
462 g_assert_not_reached ();
463 }
464
465 return GST_FLOW_OK;
466 }
467
468 static gboolean
gst_video_crop_decide_allocation(GstBaseTransform * trans,GstQuery * query)469 gst_video_crop_decide_allocation (GstBaseTransform * trans, GstQuery * query)
470 {
471 GstVideoCrop *crop = GST_VIDEO_CROP (trans);
472 gboolean use_crop_meta;
473
474 use_crop_meta = (gst_query_find_allocation_meta (query,
475 GST_VIDEO_CROP_META_API_TYPE, NULL) &&
476 gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL));
477
478 if ((crop->crop_left | crop->crop_right | crop->crop_top | crop->
479 crop_bottom) == 0) {
480 GST_INFO_OBJECT (crop, "we are using passthrough");
481 gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), TRUE);
482 gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
483 } else if (use_crop_meta) {
484 GST_INFO_OBJECT (crop, "we are doing in-place transform using crop meta");
485 gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
486 gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), TRUE);
487 } else if (crop->raw_caps) {
488 GST_INFO_OBJECT (crop, "we are not using passthrough");
489 gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
490 gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
491 } else {
492 GST_ELEMENT_ERROR (crop, STREAM, WRONG_TYPE,
493 ("Dowstream doesn't support crop for non-raw caps"), (NULL));
494 return FALSE;
495 }
496
497 return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
498 query);
499 }
500
501 static gboolean
gst_video_crop_propose_allocation(GstBaseTransform * trans,GstQuery * decide_query,GstQuery * query)502 gst_video_crop_propose_allocation (GstBaseTransform * trans,
503 GstQuery * decide_query, GstQuery * query)
504 {
505 /* if we are not passthrough, we can handle video meta and crop meta */
506 if (decide_query) {
507 GST_DEBUG_OBJECT (trans, "Advertising video meta and crop meta support");
508 gst_query_add_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL);
509 gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE, NULL);
510 }
511
512 return GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
513 decide_query, query);
514 }
515
516 static void
gst_video_crop_before_transform(GstBaseTransform * trans,GstBuffer * in)517 gst_video_crop_before_transform (GstBaseTransform * trans, GstBuffer * in)
518 {
519 GstVideoCrop *video_crop = GST_VIDEO_CROP (trans);
520 GstClockTime timestamp, stream_time;
521
522 timestamp = GST_BUFFER_TIMESTAMP (in);
523 stream_time =
524 gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME, timestamp);
525
526 GST_DEBUG_OBJECT (video_crop, "sync to %" GST_TIME_FORMAT,
527 GST_TIME_ARGS (timestamp));
528
529 if (GST_CLOCK_TIME_IS_VALID (stream_time))
530 gst_object_sync_values (GST_OBJECT (video_crop), stream_time);
531 }
532
/* GstBaseTransform::transform_ip — in-place cropping path, used when
 * downstream supports GstVideoCropMeta (see decide_allocation). Instead
 * of copying pixels, attach/extend a crop meta describing the region.
 * Returns GST_FLOW_ERROR if re-resolving the crop values fails. */
static GstFlowReturn
gst_video_crop_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
{
  GstVideoCrop *vcrop = GST_VIDEO_CROP (trans);
  GstVideoFilter *vfilter = GST_VIDEO_FILTER (trans);
  GstVideoMeta *video_meta;
  GstVideoCropMeta *crop_meta;

  GST_LOG_OBJECT (trans, "Transforming in-place");

  /* properties changed since negotiation: re-resolve the crop values */
  if (G_UNLIKELY (vcrop->need_update)) {
    if (!gst_video_crop_set_info (vfilter, NULL, &vcrop->in_info, NULL,
            &vcrop->out_info)) {
      return GST_FLOW_ERROR;
    }
  }

  /* The video meta is required since we are going to make the caps
   * width/height smaller, which would not result in a usable GstVideoInfo for
   * mapping the buffer. */
  video_meta = gst_buffer_get_video_meta (buf);
  if (!video_meta) {
    /* record the full (pre-crop) input dimensions on the buffer */
    video_meta = gst_buffer_add_video_meta (buf, GST_VIDEO_FRAME_FLAG_NONE,
        GST_VIDEO_INFO_FORMAT (&vcrop->in_info), vcrop->in_info.width,
        vcrop->in_info.height);
  }

  crop_meta = gst_buffer_get_video_crop_meta (buf);
  if (!crop_meta)
    crop_meta = gst_buffer_add_video_crop_meta (buf);

  /* accumulate on top of any existing upstream crop region; the output
   * size is the negotiated (already-cropped) width/height */
  crop_meta->x += vcrop->crop_left;
  crop_meta->y += vcrop->crop_top;
  crop_meta->width = GST_VIDEO_INFO_WIDTH (&vcrop->out_info);
  crop_meta->height = GST_VIDEO_INFO_HEIGHT (&vcrop->out_info);

  return GST_FLOW_OK;
}
571
572 static gint
gst_video_crop_transform_dimension(gint val,gint delta)573 gst_video_crop_transform_dimension (gint val, gint delta)
574 {
575 gint64 new_val = (gint64) val + (gint64) delta;
576
577 new_val = CLAMP (new_val, 1, G_MAXINT);
578
579 return (gint) new_val;
580 }
581
/* Transform one caps dimension value (width or height) by the crop delta.
 * Handles plain ints, int ranges and lists thereof (recursing for lists).
 * When "dynamic" is set (a crop property is -1, i.e. auto-crop), the
 * result is opened up into a range: unbounded above when transforming
 * towards the sink side (direction == GST_PAD_SRC), down to 1 otherwise.
 * Returns FALSE if src_val has an unsupported type or a list transforms
 * to nothing. */
static gboolean
gst_video_crop_transform_dimension_value (const GValue * src_val,
    gint delta, GValue * dest_val, GstPadDirection direction, gboolean dynamic)
{
  gboolean ret = TRUE;

  if (G_VALUE_HOLDS_INT (src_val)) {
    gint ival = g_value_get_int (src_val);
    ival = gst_video_crop_transform_dimension (ival, delta);

    if (dynamic) {
      if (direction == GST_PAD_SRC) {
        /* already saturated at the maximum: a range would be empty */
        if (ival == G_MAXINT) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, ival, G_MAXINT);
        }
      } else {
        /* already saturated at the minimum: a range would be empty */
        if (ival == 1) {
          g_value_init (dest_val, G_TYPE_INT);
          g_value_set_int (dest_val, ival);
        } else {
          g_value_init (dest_val, GST_TYPE_INT_RANGE);
          gst_value_set_int_range (dest_val, 1, ival);
        }
      }
    } else {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, ival);
    }
  } else if (GST_VALUE_HOLDS_INT_RANGE (src_val)) {
    gint min = gst_value_get_int_range_min (src_val);
    gint max = gst_value_get_int_range_max (src_val);

    min = gst_video_crop_transform_dimension (min, delta);
    max = gst_video_crop_transform_dimension (max, delta);

    if (dynamic) {
      if (direction == GST_PAD_SRC)
        max = G_MAXINT;
      else
        min = 1;
    }

    /* collapse a degenerate range into a plain int */
    if (min == max) {
      g_value_init (dest_val, G_TYPE_INT);
      g_value_set_int (dest_val, min);
    } else {
      g_value_init (dest_val, GST_TYPE_INT_RANGE);
      gst_value_set_int_range (dest_val, min, max);
    }
  } else if (GST_VALUE_HOLDS_LIST (src_val)) {
    gint i;

    g_value_init (dest_val, GST_TYPE_LIST);

    /* transform each entry; entries that fail are silently dropped */
    for (i = 0; i < gst_value_list_get_size (src_val); ++i) {
      const GValue *list_val;
      GValue newval = G_VALUE_INIT;

      list_val = gst_value_list_get_value (src_val, i);
      if (gst_video_crop_transform_dimension_value (list_val, delta, &newval,
              direction, dynamic))
        gst_value_list_append_value (dest_val, &newval);
      g_value_unset (&newval);
    }

    /* nothing survived: report failure so the caller skips this structure */
    if (gst_value_list_get_size (dest_val) == 0) {
      g_value_unset (dest_val);
      ret = FALSE;
    }
  } else {
    ret = FALSE;
  }

  return ret;
}
661
662 static GstCaps *
gst_video_crop_transform_caps(GstBaseTransform * trans,GstPadDirection direction,GstCaps * caps,GstCaps * filter_caps)663 gst_video_crop_transform_caps (GstBaseTransform * trans,
664 GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps)
665 {
666 GstVideoCrop *vcrop;
667 GstCaps *other_caps;
668 gint dy, dx, i, left, right, bottom, top;
669 gboolean w_dynamic, h_dynamic;
670
671 vcrop = GST_VIDEO_CROP (trans);
672
673 GST_OBJECT_LOCK (vcrop);
674
675 GST_LOG_OBJECT (vcrop, "l=%d,r=%d,b=%d,t=%d",
676 vcrop->prop_left, vcrop->prop_right, vcrop->prop_bottom, vcrop->prop_top);
677
678 w_dynamic = (vcrop->prop_left == -1 || vcrop->prop_right == -1);
679 h_dynamic = (vcrop->prop_top == -1 || vcrop->prop_bottom == -1);
680
681 left = (vcrop->prop_left == -1) ? 0 : vcrop->prop_left;
682 right = (vcrop->prop_right == -1) ? 0 : vcrop->prop_right;
683 bottom = (vcrop->prop_bottom == -1) ? 0 : vcrop->prop_bottom;
684 top = (vcrop->prop_top == -1) ? 0 : vcrop->prop_top;
685
686 GST_OBJECT_UNLOCK (vcrop);
687
688 if (direction == GST_PAD_SRC) {
689 dx = left + right;
690 dy = top + bottom;
691 } else {
692 dx = 0 - (left + right);
693 dy = 0 - (top + bottom);
694 }
695
696 GST_LOG_OBJECT (vcrop, "transforming caps %" GST_PTR_FORMAT, caps);
697
698 other_caps = gst_caps_new_empty ();
699
700 for (i = 0; i < gst_caps_get_size (caps); ++i) {
701 const GValue *v;
702 GstStructure *structure, *new_structure;
703 GValue w_val = G_VALUE_INIT, h_val = G_VALUE_INIT;
704 GstCapsFeatures *features;
705
706 structure = gst_caps_get_structure (caps, i);
707 features = gst_caps_get_features (caps, i);
708
709 v = gst_structure_get_value (structure, "width");
710 if (!gst_video_crop_transform_dimension_value (v, dx, &w_val, direction,
711 w_dynamic)) {
712 GST_WARNING_OBJECT (vcrop, "could not transform width value with dx=%d"
713 ", caps structure=%" GST_PTR_FORMAT, dx, structure);
714 continue;
715 }
716
717 v = gst_structure_get_value (structure, "height");
718 if (!gst_video_crop_transform_dimension_value (v, dy, &h_val, direction,
719 h_dynamic)) {
720 g_value_unset (&w_val);
721 GST_WARNING_OBJECT (vcrop, "could not transform height value with dy=%d"
722 ", caps structure=%" GST_PTR_FORMAT, dy, structure);
723 continue;
724 }
725
726 new_structure = gst_structure_copy (structure);
727 gst_structure_set_value (new_structure, "width", &w_val);
728 gst_structure_set_value (new_structure, "height", &h_val);
729 g_value_unset (&w_val);
730 g_value_unset (&h_val);
731
732 GST_LOG_OBJECT (vcrop, "transformed structure %2d: %" GST_PTR_FORMAT
733 " => %" GST_PTR_FORMAT "features %" GST_PTR_FORMAT, i, structure,
734 new_structure, features);
735 gst_caps_append_structure (other_caps, new_structure);
736
737 gst_caps_set_features (other_caps, i, gst_caps_features_copy (features));
738 }
739
740 if (!gst_caps_is_empty (other_caps) && filter_caps) {
741 GstCaps *tmp = gst_caps_intersect_full (filter_caps, other_caps,
742 GST_CAPS_INTERSECT_FIRST);
743 gst_caps_replace (&other_caps, tmp);
744 gst_caps_unref (tmp);
745 }
746
747 return other_caps;
748 }
749
/* GstVideoFilter::set_info — (re)compute the active crop values.
 *
 * Called on caps negotiation, and also internally (with in/out caps NULL)
 * from the transform functions when the properties changed (need_update).
 * Resolves -1 (auto-crop) property values against the actual in/out
 * dimension difference, validates the total crop amounts, and selects the
 * packing-specific transform routine for the negotiated format.
 * Returns FALSE if the requested crop exceeds the picture size or the
 * format is unsupported. */
static gboolean
gst_video_crop_set_info (GstVideoFilter * vfilter, GstCaps * in,
    GstVideoInfo * in_info, GstCaps * out, GstVideoInfo * out_info)
{
  GstVideoCrop *crop = GST_VIDEO_CROP (vfilter);
  GstCapsFeatures *features;
  int dx, dy;

  /* latch the property values into the active crop values */
  GST_OBJECT_LOCK (crop);
  crop->need_update = FALSE;
  crop->crop_left = crop->prop_left;
  crop->crop_right = crop->prop_right;
  crop->crop_top = crop->prop_top;
  crop->crop_bottom = crop->prop_bottom;
  GST_OBJECT_UNLOCK (crop);

  /* total number of pixels removed in each dimension */
  dx = GST_VIDEO_INFO_WIDTH (in_info) - GST_VIDEO_INFO_WIDTH (out_info);
  dy = GST_VIDEO_INFO_HEIGHT (in_info) - GST_VIDEO_INFO_HEIGHT (out_info);

  /* resolve auto-crop (-1): split evenly when both sides are auto,
   * otherwise give the remainder to the auto side */
  if (crop->crop_left == -1 && crop->crop_right == -1) {
    crop->crop_left = dx / 2;
    crop->crop_right = dx / 2 + (dx & 1);
  } else if (crop->crop_left == -1) {
    if (G_UNLIKELY (crop->crop_right > dx))
      goto cropping_too_much;
    crop->crop_left = dx - crop->crop_right;
  } else if (crop->crop_right == -1) {
    if (G_UNLIKELY (crop->crop_left > dx))
      goto cropping_too_much;
    crop->crop_right = dx - crop->crop_left;
  }

  if (crop->crop_top == -1 && crop->crop_bottom == -1) {
    crop->crop_top = dy / 2;
    crop->crop_bottom = dy / 2 + (dy & 1);
  } else if (crop->crop_top == -1) {
    if (G_UNLIKELY (crop->crop_bottom > dy))
      goto cropping_too_much;
    crop->crop_top = dy - crop->crop_bottom;
  } else if (crop->crop_bottom == -1) {
    if (G_UNLIKELY (crop->crop_top > dy))
      goto cropping_too_much;
    crop->crop_bottom = dy - crop->crop_top;
  }

  /* at least one pixel must remain in each dimension */
  if (G_UNLIKELY ((crop->crop_left + crop->crop_right) >=
          GST_VIDEO_INFO_WIDTH (in_info)
          || (crop->crop_top + crop->crop_bottom) >=
          GST_VIDEO_INFO_HEIGHT (in_info)))
    goto cropping_too_much;

  if (in && out)
    GST_LOG_OBJECT (crop, "incaps = %" GST_PTR_FORMAT ", outcaps = %"
        GST_PTR_FORMAT, in, out);

  /* only raw system memory can be copied pixel-wise; anything else must
   * go through the crop-meta path (see decide_allocation). When called
   * internally (in == NULL) raw_caps keeps its previous value. */
  if (in) {
    features = gst_caps_get_features (in, 0);
    crop->raw_caps = gst_caps_features_is_equal (features,
        GST_CAPS_FEATURES_MEMORY_SYSTEM_MEMORY);
  }

  if (!crop->raw_caps)
    goto beach;

  /* classify the format so transform_frame can dispatch quickly */
  switch (GST_VIDEO_INFO_FORMAT (in_info)) {
    case GST_VIDEO_FORMAT_RGB:
    case GST_VIDEO_FORMAT_BGR:
    case GST_VIDEO_FORMAT_RGB16:
    case GST_VIDEO_FORMAT_RGB15:
    case GST_VIDEO_FORMAT_RGBx:
    case GST_VIDEO_FORMAT_xRGB:
    case GST_VIDEO_FORMAT_BGRx:
    case GST_VIDEO_FORMAT_xBGR:
    case GST_VIDEO_FORMAT_RGBA:
    case GST_VIDEO_FORMAT_ARGB:
    case GST_VIDEO_FORMAT_BGRA:
    case GST_VIDEO_FORMAT_ABGR:
    case GST_VIDEO_FORMAT_GRAY8:
    case GST_VIDEO_FORMAT_GRAY16_LE:
    case GST_VIDEO_FORMAT_GRAY16_BE:
    case GST_VIDEO_FORMAT_AYUV:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_SIMPLE;
      break;
    case GST_VIDEO_FORMAT_YVYU:
    case GST_VIDEO_FORMAT_YUY2:
    case GST_VIDEO_FORMAT_UYVY:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PACKED_COMPLEX;
      if (GST_VIDEO_INFO_FORMAT (in_info) == GST_VIDEO_FORMAT_UYVY) {
        /* UYVY = 4:2:2 - [U0 Y0 V0 Y1] [U2 Y2 V2 Y3] [U4 Y4 V4 Y5] */
        crop->macro_y_off = 1;
      } else {
        /* YUYV = 4:2:2 - [Y0 U0 Y1 V0] [Y2 U2 Y3 V2] [Y4 U4 Y5 V4] = YUY2 */
        crop->macro_y_off = 0;
      }
      break;
    case GST_VIDEO_FORMAT_I420:
    case GST_VIDEO_FORMAT_I420_10BE:
    case GST_VIDEO_FORMAT_I420_10LE:
    case GST_VIDEO_FORMAT_I420_12BE:
    case GST_VIDEO_FORMAT_I420_12LE:
    case GST_VIDEO_FORMAT_A420:
    case GST_VIDEO_FORMAT_A420_10BE:
    case GST_VIDEO_FORMAT_A420_10LE:
    case GST_VIDEO_FORMAT_YV12:
    case GST_VIDEO_FORMAT_Y444:
    case GST_VIDEO_FORMAT_Y444_10BE:
    case GST_VIDEO_FORMAT_Y444_10LE:
    case GST_VIDEO_FORMAT_Y444_12BE:
    case GST_VIDEO_FORMAT_Y444_12LE:
    case GST_VIDEO_FORMAT_A444_10BE:
    case GST_VIDEO_FORMAT_A444_10LE:
    case GST_VIDEO_FORMAT_Y42B:
    case GST_VIDEO_FORMAT_I422_10BE:
    case GST_VIDEO_FORMAT_I422_10LE:
    case GST_VIDEO_FORMAT_A422_10BE:
    case GST_VIDEO_FORMAT_A422_10LE:
    case GST_VIDEO_FORMAT_I422_12BE:
    case GST_VIDEO_FORMAT_I422_12LE:
    case GST_VIDEO_FORMAT_GBR:
    case GST_VIDEO_FORMAT_GBR_10BE:
    case GST_VIDEO_FORMAT_GBR_10LE:
    case GST_VIDEO_FORMAT_GBR_12BE:
    case GST_VIDEO_FORMAT_GBR_12LE:
    case GST_VIDEO_FORMAT_GBRA:
    case GST_VIDEO_FORMAT_GBRA_10BE:
    case GST_VIDEO_FORMAT_GBRA_10LE:
    case GST_VIDEO_FORMAT_GBRA_12BE:
    case GST_VIDEO_FORMAT_GBRA_12LE:
    case GST_VIDEO_FORMAT_Y41B:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_PLANAR;
      break;
    case GST_VIDEO_FORMAT_NV12:
    case GST_VIDEO_FORMAT_NV21:
      crop->packing = VIDEO_CROP_PIXEL_FORMAT_SEMI_PLANAR;
      break;
    default:
      goto unknown_format;
  }

beach:
  crop->in_info = *in_info;
  crop->out_info = *out_info;

  /* Ensure our decide_allocation will be called again when needed */
  if (gst_base_transform_is_passthrough (GST_BASE_TRANSFORM (crop))) {
    gst_base_transform_set_passthrough (GST_BASE_TRANSFORM (crop), FALSE);
    gst_base_transform_set_in_place (GST_BASE_TRANSFORM (crop), FALSE);
  }

  return TRUE;

/* ERROR */
cropping_too_much:
  {
    GST_WARNING_OBJECT (crop, "we are cropping too much");
    return FALSE;
  }
unknown_format:
  {
    GST_WARNING_OBJECT (crop, "Unsupported format");
    return FALSE;
  }
}
913
914 /* called with object lock */
915 static inline void
gst_video_crop_set_crop(GstVideoCrop * vcrop,gint new_value,gint * prop)916 gst_video_crop_set_crop (GstVideoCrop * vcrop, gint new_value, gint * prop)
917 {
918 if (*prop != new_value) {
919 *prop = new_value;
920 vcrop->need_update = TRUE;
921 }
922 }
923
924 static void
gst_video_crop_set_property(GObject * object,guint prop_id,const GValue * value,GParamSpec * pspec)925 gst_video_crop_set_property (GObject * object, guint prop_id,
926 const GValue * value, GParamSpec * pspec)
927 {
928 GstVideoCrop *video_crop;
929
930 video_crop = GST_VIDEO_CROP (object);
931
932 GST_OBJECT_LOCK (video_crop);
933 switch (prop_id) {
934 case PROP_LEFT:
935 gst_video_crop_set_crop (video_crop, g_value_get_int (value),
936 &video_crop->prop_left);
937 break;
938 case PROP_RIGHT:
939 gst_video_crop_set_crop (video_crop, g_value_get_int (value),
940 &video_crop->prop_right);
941 break;
942 case PROP_TOP:
943 gst_video_crop_set_crop (video_crop, g_value_get_int (value),
944 &video_crop->prop_top);
945 break;
946 case PROP_BOTTOM:
947 gst_video_crop_set_crop (video_crop, g_value_get_int (value),
948 &video_crop->prop_bottom);
949 break;
950 default:
951 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
952 break;
953 }
954 GST_LOG_OBJECT (video_crop, "l=%d,r=%d,b=%d,t=%d, need_update:%d",
955 video_crop->prop_left, video_crop->prop_right, video_crop->prop_bottom,
956 video_crop->prop_top, video_crop->need_update);
957
958 GST_OBJECT_UNLOCK (video_crop);
959
960 gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (video_crop));
961 }
962
963 static void
gst_video_crop_get_property(GObject * object,guint prop_id,GValue * value,GParamSpec * pspec)964 gst_video_crop_get_property (GObject * object, guint prop_id, GValue * value,
965 GParamSpec * pspec)
966 {
967 GstVideoCrop *video_crop;
968
969 video_crop = GST_VIDEO_CROP (object);
970
971 GST_OBJECT_LOCK (video_crop);
972 switch (prop_id) {
973 case PROP_LEFT:
974 g_value_set_int (value, video_crop->prop_left);
975 break;
976 case PROP_RIGHT:
977 g_value_set_int (value, video_crop->prop_right);
978 break;
979 case PROP_TOP:
980 g_value_set_int (value, video_crop->prop_top);
981 break;
982 case PROP_BOTTOM:
983 g_value_set_int (value, video_crop->prop_bottom);
984 break;
985 default:
986 G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
987 break;
988 }
989 GST_OBJECT_UNLOCK (video_crop);
990 }
991