/* GStreamer
 * Copyright (C) <2007> Wim Taymans <wim@fluendo.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */
/**
 * SECTION:element-simplevideomarkdetect
 * @title: simplevideomarkdetect
 * @see_also: #GstVideoMark
 *
 * This element detects #GstSimpleVideoMarkDetect:pattern-count squares in the bottom left
 * corner of the video frames. Each square is #GstSimpleVideoMarkDetect:pattern-width pixels
 * wide and #GstSimpleVideoMarkDetect:pattern-height pixels high. Even squares must be black
 * and odd squares must be white.
 *
 * When the pattern has been found, #GstSimpleVideoMarkDetect:pattern-data-count squares
 * following the pattern squares are read as a bit array. White squares represent a 1
 * bit and black squares a 0 bit. The bit array is included in the element
 * message that is posted (see below).
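 * For example, with #GstSimpleVideoMarkDetect:pattern-data-count set to 5, the data
 * square sequence white, black, white, white, black is read with the first square as
 * the most significant bit and decodes to 0b10110, i.e. 22.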
 *
 * After the pattern has been found and the data pattern has been read, an
 * element message called `GstSimpleVideoMarkDetect` will
 * be posted on the bus. If the pattern is no longer found in the frame, the
 * same element message is posted with the have-pattern field set to %FALSE.
 * The message is only posted if the #GstSimpleVideoMarkDetect:message property is %TRUE.
 *
 * The message's structure contains these fields:
 *
 * * #gboolean `have-pattern`: whether the pattern was found. This field is %TRUE
 *   for as long as the pattern is found in the frame and %FALSE for the first
 *   frame that no longer contains the pattern.
 *
 * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
 *
 * * #GstClockTime `stream-time`: the stream time of the buffer.
 *
 * * #GstClockTime `running-time`: the running time of the buffer.
 *
 * * #GstClockTime `duration`: the duration of the buffer.
 *
 * * #guint64 `data`: the data pattern found after the marker squares, or 0 when
 *   have-pattern is %FALSE.
 *
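 * A minimal sketch of a bus callback that reads these fields from such a
 * message (the callback and variable names are illustrative, not part of the
 * element API):
 *
 * |[<!-- language="C" -->
 * static gboolean
 * bus_cb (GstBus * bus, GstMessage * msg, gpointer user_data)
 * {
 *   if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
 *     const GstStructure *s = gst_message_get_structure (msg);
 *
 *     if (gst_structure_has_name (s, "GstSimpleVideoMarkDetect")) {
 *       gboolean have_pattern = FALSE;
 *       guint64 data = 0;
 *
 *       gst_structure_get_boolean (s, "have-pattern", &have_pattern);
 *       gst_structure_get_uint64 (s, "data", &data);
 *       g_print ("pattern %sfound, data %" G_GUINT64_FORMAT "\n",
 *           have_pattern ? "" : "not ", data);
 *     }
 *   }
 *   return TRUE;
 * }
 * ]|
 *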
 * ## Example launch line
 * |[
 * gst-launch-1.0 videotestsrc ! simplevideomarkdetect ! videoconvert ! ximagesink
 * ]|
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
#include "gstsimplevideomarkdetect.h"

GST_DEBUG_CATEGORY_STATIC (gst_video_detect_debug_category);
#define GST_CAT_DEFAULT gst_video_detect_debug_category

/* prototypes */


static void gst_video_detect_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_detect_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_video_detect_dispose (GObject * object);
static void gst_video_detect_finalize (GObject * object);

static gboolean gst_video_detect_start (GstBaseTransform * trans);
static gboolean gst_video_detect_stop (GstBaseTransform * trans);
static gboolean gst_video_detect_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info);
static GstFlowReturn gst_video_detect_transform_frame_ip (GstVideoFilter *
    filter, GstVideoFrame * frame);

enum
{
  PROP_0,
  PROP_MESSAGE,
  PROP_PATTERN_WIDTH,
  PROP_PATTERN_HEIGHT,
  PROP_PATTERN_COUNT,
  PROP_PATTERN_DATA_COUNT,
  PROP_PATTERN_CENTER,
  PROP_PATTERN_SENSITIVITY,
  PROP_LEFT_OFFSET,
  PROP_BOTTOM_OFFSET
};

#define DEFAULT_MESSAGE TRUE
#define DEFAULT_PATTERN_WIDTH 4
#define DEFAULT_PATTERN_HEIGHT 16
#define DEFAULT_PATTERN_COUNT 4
#define DEFAULT_PATTERN_DATA_COUNT 5
#define DEFAULT_PATTERN_CENTER 0.5
#define DEFAULT_PATTERN_SENSITIVITY 0.3
#define DEFAULT_LEFT_OFFSET 0
#define DEFAULT_BOTTOM_OFFSET 0

/* pad templates */

#define VIDEO_CAPS \
    GST_VIDEO_CAPS_MAKE( \
    "{ I420, YV12, Y41B, Y42B, Y444, YUY2, UYVY, AYUV, YVYU }")


/* class initialization */

G_DEFINE_TYPE_WITH_CODE (GstSimpleVideoMarkDetect, gst_video_detect,
    GST_TYPE_VIDEO_FILTER,
    GST_DEBUG_CATEGORY_INIT (gst_video_detect_debug_category,
        "simplevideomarkdetect", 0,
        "debug category for simplevideomarkdetect element"));

static void
gst_video_detect_class_init (GstSimpleVideoMarkDetectClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (klass);
  GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);

  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));
  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));

  gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
      "Video detector", "Filter/Effect/Video",
      "Detect patterns in a video signal", "Wim Taymans <wim@fluendo.com>");

  gobject_class->set_property = gst_video_detect_set_property;
  gobject_class->get_property = gst_video_detect_get_property;
  gobject_class->dispose = gst_video_detect_dispose;
  gobject_class->finalize = gst_video_detect_finalize;
  base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_detect_start);
  base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_detect_stop);
  video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_detect_set_info);
  video_filter_class->transform_frame_ip =
      GST_DEBUG_FUNCPTR (gst_video_detect_transform_frame_ip);

  g_object_class_install_property (gobject_class, PROP_MESSAGE,
      g_param_spec_boolean ("message", "Message",
          "Post detected data as bus messages",
          DEFAULT_MESSAGE,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_WIDTH,
      g_param_spec_int ("pattern-width", "Pattern width",
          "The width of the pattern markers", 1, G_MAXINT,
          DEFAULT_PATTERN_WIDTH,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_HEIGHT,
      g_param_spec_int ("pattern-height", "Pattern height",
          "The height of the pattern markers", 1, G_MAXINT,
          DEFAULT_PATTERN_HEIGHT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_COUNT,
      g_param_spec_int ("pattern-count", "Pattern count",
          "The number of pattern markers", 0, G_MAXINT,
          DEFAULT_PATTERN_COUNT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_DATA_COUNT,
      g_param_spec_int ("pattern-data-count", "Pattern data count",
          "The number of extra data pattern markers", 0, G_MAXINT,
          DEFAULT_PATTERN_DATA_COUNT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_CENTER,
      g_param_spec_double ("pattern-center", "Pattern center",
          "The center of the black/white separation (0.0 = lowest, 1.0 highest)",
          0.0, 1.0, DEFAULT_PATTERN_CENTER,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_SENSITIVITY,
      g_param_spec_double ("pattern-sensitivity", "Pattern sensitivity",
          "The sensitivity around the center for detecting the markers "
          "(0.0 = lowest, 1.0 highest)", 0.0, 1.0, DEFAULT_PATTERN_SENSITIVITY,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_LEFT_OFFSET,
      g_param_spec_int ("left-offset", "Left Offset",
          "The offset from the left border where the pattern starts", 0,
          G_MAXINT, DEFAULT_LEFT_OFFSET,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BOTTOM_OFFSET,
      g_param_spec_int ("bottom-offset", "Bottom Offset",
          "The offset from the bottom border where the pattern starts", 0,
          G_MAXINT, DEFAULT_BOTTOM_OFFSET,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
}

static void
gst_video_detect_init (GstSimpleVideoMarkDetect * simplevideomarkdetect)
{
}

void
gst_video_detect_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "set_property");

  switch (property_id) {
    case PROP_MESSAGE:
      simplevideomarkdetect->message = g_value_get_boolean (value);
      break;
    case PROP_PATTERN_WIDTH:
      simplevideomarkdetect->pattern_width = g_value_get_int (value);
      break;
    case PROP_PATTERN_HEIGHT:
      simplevideomarkdetect->pattern_height = g_value_get_int (value);
      break;
    case PROP_PATTERN_COUNT:
      simplevideomarkdetect->pattern_count = g_value_get_int (value);
      break;
    case PROP_PATTERN_DATA_COUNT:
      simplevideomarkdetect->pattern_data_count = g_value_get_int (value);
      break;
    case PROP_PATTERN_CENTER:
      simplevideomarkdetect->pattern_center = g_value_get_double (value);
      break;
    case PROP_PATTERN_SENSITIVITY:
      simplevideomarkdetect->pattern_sensitivity = g_value_get_double (value);
      break;
    case PROP_LEFT_OFFSET:
      simplevideomarkdetect->left_offset = g_value_get_int (value);
      break;
    case PROP_BOTTOM_OFFSET:
      simplevideomarkdetect->bottom_offset = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_detect_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "get_property");

  switch (property_id) {
    case PROP_MESSAGE:
      g_value_set_boolean (value, simplevideomarkdetect->message);
      break;
    case PROP_PATTERN_WIDTH:
      g_value_set_int (value, simplevideomarkdetect->pattern_width);
      break;
    case PROP_PATTERN_HEIGHT:
      g_value_set_int (value, simplevideomarkdetect->pattern_height);
      break;
    case PROP_PATTERN_COUNT:
      g_value_set_int (value, simplevideomarkdetect->pattern_count);
      break;
    case PROP_PATTERN_DATA_COUNT:
      g_value_set_int (value, simplevideomarkdetect->pattern_data_count);
      break;
    case PROP_PATTERN_CENTER:
      g_value_set_double (value, simplevideomarkdetect->pattern_center);
      break;
    case PROP_PATTERN_SENSITIVITY:
      g_value_set_double (value, simplevideomarkdetect->pattern_sensitivity);
      break;
    case PROP_LEFT_OFFSET:
      g_value_set_int (value, simplevideomarkdetect->left_offset);
      break;
    case PROP_BOTTOM_OFFSET:
      g_value_set_int (value, simplevideomarkdetect->bottom_offset);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_detect_dispose (GObject * object)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "dispose");

  /* clean up as possible. may be called multiple times */

  G_OBJECT_CLASS (gst_video_detect_parent_class)->dispose (object);
}

void
gst_video_detect_finalize (GObject * object)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "finalize");

  /* clean up object here */

  G_OBJECT_CLASS (gst_video_detect_parent_class)->finalize (object);
}

static gboolean
gst_video_detect_start (GstBaseTransform * trans)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (trans);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "start");

  return TRUE;
}

static gboolean
gst_video_detect_stop (GstBaseTransform * trans)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (trans);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "stop");

  return TRUE;
}

static gboolean
gst_video_detect_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (filter);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "set_info");

  return TRUE;
}

static void
gst_video_detect_post_message (GstSimpleVideoMarkDetect * simplevideomarkdetect,
    GstBuffer * buffer, guint64 data)
{
  GstBaseTransform *trans;
  GstMessage *m;
  guint64 duration, timestamp, running_time, stream_time;

  trans = GST_BASE_TRANSFORM_CAST (simplevideomarkdetect);

  /* get timestamps */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);

  /* post message */
  m = gst_message_new_element (GST_OBJECT_CAST (simplevideomarkdetect),
      gst_structure_new ("GstSimpleVideoMarkDetect",
          "have-pattern", G_TYPE_BOOLEAN, simplevideomarkdetect->in_pattern,
          "timestamp", G_TYPE_UINT64, timestamp,
          "stream-time", G_TYPE_UINT64, stream_time,
          "running-time", G_TYPE_UINT64, running_time,
          "duration", G_TYPE_UINT64, duration,
          "data", G_TYPE_UINT64, data, NULL));
  gst_element_post_message (GST_ELEMENT_CAST (simplevideomarkdetect), m);
}

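/* Sum the first-component (luma) values of a width x height block, stepping
 * with the given strides, and normalise the result to the 0.0 .. 1.0 range so
 * it can be compared directly against pattern-center and pattern-sensitivity. */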
static gdouble
gst_video_detect_calc_brightness (GstSimpleVideoMarkDetect *
    simplevideomarkdetect, guint8 * data, gint width, gint height,
    gint row_stride, gint pixel_stride)
{
  gint i, j;
  guint64 sum;

  sum = 0;
  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      sum += data[pixel_stride * j];
    }
    data += row_stride;
  }
  return sum / (255.0 * width * height);
}

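/* Clip the pattern width so that a marker starting before the left edge
 * (x < 0) or running past the right edge (x + pw > width) is only sampled
 * where it is actually inside the frame. */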
static gint
calculate_pw (gint pw, gint x, gint width)
{
  if (x < 0)
    pw += x;
  else if ((x + pw) > width)
    pw = width - x;

  return pw;
}

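/* Scan one frame: walk the marker squares in the bottom left corner, check
 * the alternating black/white pattern, then read the following data squares
 * into a guint64 and post an element message with the result. */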
static void
gst_video_detect_yuv (GstSimpleVideoMarkDetect * simplevideomarkdetect,
    GstVideoFrame * frame)
{
  gdouble brightness;
  gint i, pw, ph, row_stride, pixel_stride;
  gint width, height, offset_calc, x, y;
  guint8 *d;
  guint64 pattern_data;
  gint total_pattern;

  width = frame->info.width;
  height = frame->info.height;

  pw = simplevideomarkdetect->pattern_width;
  ph = simplevideomarkdetect->pattern_height;
  row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  /* move to start of bottom left, adjust for offsets */
  offset_calc =
      row_stride * (height - ph - simplevideomarkdetect->bottom_offset) +
      pixel_stride * simplevideomarkdetect->left_offset;
  x = simplevideomarkdetect->left_offset;
  y = height - ph - simplevideomarkdetect->bottom_offset;

  total_pattern =
      simplevideomarkdetect->pattern_count +
      simplevideomarkdetect->pattern_data_count;
  /* If x and y offset values are outside the video, no need to analyze */
  if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0
      || y > height) {
    GST_ERROR_OBJECT (simplevideomarkdetect,
        "simplevideomarkdetect pattern is outside the video. Not Analyzing.");
    return;
  }

  /* If the offset calculation is less than 0, reset it to 0 */
  if (offset_calc < 0)
    offset_calc = 0;
  /* If the Y position of the mark is negative or the pattern extends below
   * the video, recalculate the pattern height for the partially visible part */
  if (y < 0)
    ph += y;
  else if ((y + ph) > height)
    ph = height - y;
  /* If the pattern height is less than 0 there is nothing to analyze */
  if (ph < 0)
    return;

  /* move to start of bottom left */
  d += offset_calc;

  /* analyze the bottom left pixels */
  for (i = 0; i < simplevideomarkdetect->pattern_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);

    GST_DEBUG_OBJECT (simplevideomarkdetect, "brightness %f", brightness);

    if (i & 1) {
      /* odd squares must be white; blocks darker than the center +
       * sensitivity are considered wrong. */
      if (brightness <
          (simplevideomarkdetect->pattern_center +
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    } else {
      /* even squares must be black; blocks lighter than the center -
       * sensitivity are considered wrong. */
      if (brightness >
          (simplevideomarkdetect->pattern_center -
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    }

    /* If the X position of the mark is negative or the pattern extends past
     * the video width, recalculate the pattern width for the visible part */
    draw_pw = calculate_pw (pw, x, width);
    /* If the pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)
      break;
  }
  GST_DEBUG_OBJECT (simplevideomarkdetect, "found pattern");

  pattern_data = 0;

  /* get the data of the pattern */
  for (i = 0; i < simplevideomarkdetect->pattern_data_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);
    /* update the pattern; we just use the center to decide between black and
     * white. */
    pattern_data <<= 1;
    if (brightness > simplevideomarkdetect->pattern_center)
      pattern_data |= 1;

    /* If the X position of the mark is negative or the pattern extends past
     * the video width, recalculate the pattern width for the visible part */
    draw_pw = calculate_pw (pw, x, width);
    /* If the pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern data */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (simplevideomarkdetect->pattern_data_count - i - 1))) < 0
        || x >= width)
      break;
  }

  GST_DEBUG_OBJECT (simplevideomarkdetect, "have data %" G_GUINT64_FORMAT,
      pattern_data);

  simplevideomarkdetect->in_pattern = TRUE;
  gst_video_detect_post_message (simplevideomarkdetect, frame->buffer,
      pattern_data);

  return;

no_pattern:
  {
    GST_DEBUG_OBJECT (simplevideomarkdetect, "no pattern found");
    if (simplevideomarkdetect->in_pattern) {
      simplevideomarkdetect->in_pattern = FALSE;
      gst_video_detect_post_message (simplevideomarkdetect, frame->buffer, 0);
    }
    return;
  }
}

static GstFlowReturn
gst_video_detect_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (filter);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "transform_frame_ip");

  gst_video_detect_yuv (simplevideomarkdetect, frame);

  return GST_FLOW_OK;
}