/* GStreamer
 * Copyright (C) <2007> Wim Taymans <wim@fluendo.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */
/**
 * SECTION:element-simplevideomarkdetect
 * @title: simplevideomarkdetect
 * @see_also: #GstVideoMark
 *
 * This plugin detects #GstSimpleVideoMarkDetect:pattern-count squares in the bottom left
 * corner of the video frames. Each square is #GstSimpleVideoMarkDetect:pattern-width
 * pixels wide and #GstSimpleVideoMarkDetect:pattern-height pixels high.
 * Even squares must be black and odd squares must be white.
 *
 * When the pattern has been found, #GstSimpleVideoMarkDetect:pattern-data-count squares
 * after the pattern squares are read as a bit array. White squares represent a 1
 * bit and black squares a 0 bit. The bit array is included in the element
 * message that is posted (see below).
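 *
 * For example, with #GstSimpleVideoMarkDetect:pattern-data-count set to 5 and
 * data squares of white, black, white, white, black, the reported data value
 * is 0b10110 (22); the first data square ends up in the most significant bit.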
 *
 * After the pattern has been found and the data pattern has been read, an
 * element message called `GstSimpleVideoMarkDetect` will
 * be posted on the bus. If the pattern is no longer found in the frame, the
 * same element message is posted with the have-pattern field set to %FALSE.
 * The message is only posted if the #GstSimpleVideoMarkDetect:message property is %TRUE.
 *
 * The message's structure contains these fields:
 *
 * * #gboolean `have-pattern`: whether the pattern was found. This field is set
 * to %TRUE for as long as the pattern is found in the frame and set to %FALSE
 * for the first frame that no longer contains the pattern.
 *
 * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
 *
 * * #GstClockTime `stream-time`: the stream time of the buffer.
 *
 * * #GstClockTime `running-time`: the running_time of the buffer.
 *
 * * #GstClockTime `duration`: the duration of the buffer.
 *
 * * #guint64 `data`: the data-pattern found after the pattern, or 0 when have-pattern is %FALSE.
 *
 * ## Example launch line
 * |[
 * gst-launch-1.0 videotestsrc ! simplevideomarkdetect ! videoconvert ! ximagesink
 * ]|
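 *
 * ## Example message handling
 *
 * An application can watch the pipeline bus for the element message described
 * above. The following is a minimal sketch; the bus-watch callback name
 * `on_bus_message` is illustrative and not part of this element:
 *
 * |[
 * static gboolean
 * on_bus_message (GstBus * bus, GstMessage * message, gpointer user_data)
 * {
 *   if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
 *     const GstStructure *s = gst_message_get_structure (message);
 *
 *     if (gst_structure_has_name (s, "GstSimpleVideoMarkDetect")) {
 *       gboolean have_pattern = FALSE;
 *       guint64 data = 0;
 *
 *       gst_structure_get_boolean (s, "have-pattern", &have_pattern);
 *       gst_structure_get_uint64 (s, "data", &data);
 *       g_print ("have-pattern: %d, data: %" G_GUINT64_FORMAT "\n",
 *           have_pattern, data);
 *     }
 *   }
 *   return TRUE;
 * }
 * ]|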
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
#include "gstsimplevideomarkdetect.h"

GST_DEBUG_CATEGORY_STATIC (gst_video_detect_debug_category);
#define GST_CAT_DEFAULT gst_video_detect_debug_category

/* prototypes */


static void gst_video_detect_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_detect_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_video_detect_dispose (GObject * object);
static void gst_video_detect_finalize (GObject * object);

static gboolean gst_video_detect_start (GstBaseTransform * trans);
static gboolean gst_video_detect_stop (GstBaseTransform * trans);
static gboolean gst_video_detect_set_info (GstVideoFilter * filter,
    GstCaps * incaps, GstVideoInfo * in_info, GstCaps * outcaps,
    GstVideoInfo * out_info);
static GstFlowReturn gst_video_detect_transform_frame_ip (GstVideoFilter *
    filter, GstVideoFrame * frame);

enum
{
  PROP_0,
  PROP_MESSAGE,
  PROP_PATTERN_WIDTH,
  PROP_PATTERN_HEIGHT,
  PROP_PATTERN_COUNT,
  PROP_PATTERN_DATA_COUNT,
  PROP_PATTERN_CENTER,
  PROP_PATTERN_SENSITIVITY,
  PROP_LEFT_OFFSET,
  PROP_BOTTOM_OFFSET
};

#define DEFAULT_MESSAGE TRUE
#define DEFAULT_PATTERN_WIDTH 4
#define DEFAULT_PATTERN_HEIGHT 16
#define DEFAULT_PATTERN_COUNT 4
#define DEFAULT_PATTERN_DATA_COUNT 5
#define DEFAULT_PATTERN_CENTER 0.5
#define DEFAULT_PATTERN_SENSITIVITY 0.3
#define DEFAULT_LEFT_OFFSET 0
#define DEFAULT_BOTTOM_OFFSET 0

/* pad templates */

#define VIDEO_CAPS \
    GST_VIDEO_CAPS_MAKE( \
    "{ I420, YV12, Y41B, Y42B, Y444, YUY2, UYVY, AYUV, YVYU }")


/* class initialization */

G_DEFINE_TYPE_WITH_CODE (GstSimpleVideoMarkDetect, gst_video_detect,
    GST_TYPE_VIDEO_FILTER,
    GST_DEBUG_CATEGORY_INIT (gst_video_detect_debug_category,
        "simplevideomarkdetect", 0,
        "debug category for simplevideomarkdetect element"));
GST_ELEMENT_REGISTER_DEFINE (simplevideomarkdetect,
    "simplevideomarkdetect", GST_RANK_NONE, GST_TYPE_SIMPLE_VIDEO_MARK_DETECT);

static void
gst_video_detect_class_init (GstSimpleVideoMarkDetectClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstBaseTransformClass *base_transform_class =
      GST_BASE_TRANSFORM_CLASS (klass);
  GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);

  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));
  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));

  gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
150 "Video detecter", "Filter/Effect/Video",
151 "Detect patterns in a video signal", "Wim Taymans <wim@fluendo.com>");

  gobject_class->set_property = gst_video_detect_set_property;
  gobject_class->get_property = gst_video_detect_get_property;
  gobject_class->dispose = gst_video_detect_dispose;
  gobject_class->finalize = gst_video_detect_finalize;
  base_transform_class->start = GST_DEBUG_FUNCPTR (gst_video_detect_start);
  base_transform_class->stop = GST_DEBUG_FUNCPTR (gst_video_detect_stop);
  video_filter_class->set_info = GST_DEBUG_FUNCPTR (gst_video_detect_set_info);
  video_filter_class->transform_frame_ip =
      GST_DEBUG_FUNCPTR (gst_video_detect_transform_frame_ip);

  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MESSAGE,
      g_param_spec_boolean ("message", "Message",
          "Post detected data as bus messages",
          DEFAULT_MESSAGE,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_WIDTH,
      g_param_spec_int ("pattern-width", "Pattern width",
          "The width of the pattern markers", 1, G_MAXINT,
          DEFAULT_PATTERN_WIDTH,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_HEIGHT,
      g_param_spec_int ("pattern-height", "Pattern height",
          "The height of the pattern markers", 1, G_MAXINT,
          DEFAULT_PATTERN_HEIGHT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_COUNT,
      g_param_spec_int ("pattern-count", "Pattern count",
          "The number of pattern markers", 0, G_MAXINT,
          DEFAULT_PATTERN_COUNT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_DATA_COUNT,
      g_param_spec_int ("pattern-data-count", "Pattern data count",
          "The number of extra data pattern markers", 0, G_MAXINT,
          DEFAULT_PATTERN_DATA_COUNT,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_CENTER,
      g_param_spec_double ("pattern-center", "Pattern center",
          "The center of the black/white separation (0.0 = lowest, 1.0 highest)",
          0.0, 1.0, DEFAULT_PATTERN_CENTER,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_PATTERN_SENSITIVITY,
      g_param_spec_double ("pattern-sensitivity", "Pattern sensitivity",
          "The sensitivity around the center for detecting the markers "
          "(0.0 = lowest, 1.0 highest)", 0.0, 1.0, DEFAULT_PATTERN_SENSITIVITY,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_LEFT_OFFSET,
      g_param_spec_int ("left-offset", "Left Offset",
          "The offset from the left border where the pattern starts", 0,
          G_MAXINT, DEFAULT_LEFT_OFFSET,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (gobject_class, PROP_BOTTOM_OFFSET,
      g_param_spec_int ("bottom-offset", "Bottom Offset",
          "The offset from the bottom border where the pattern starts", 0,
          G_MAXINT, DEFAULT_BOTTOM_OFFSET,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
}

static void
gst_video_detect_init (GstSimpleVideoMarkDetect * simplevideomarkdetect)
{
}

void
gst_video_detect_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "set_property");

  switch (property_id) {
    case PROP_MESSAGE:
      simplevideomarkdetect->message = g_value_get_boolean (value);
      break;
    case PROP_PATTERN_WIDTH:
      simplevideomarkdetect->pattern_width = g_value_get_int (value);
      break;
    case PROP_PATTERN_HEIGHT:
      simplevideomarkdetect->pattern_height = g_value_get_int (value);
      break;
    case PROP_PATTERN_COUNT:
      simplevideomarkdetect->pattern_count = g_value_get_int (value);
      break;
    case PROP_PATTERN_DATA_COUNT:
      simplevideomarkdetect->pattern_data_count = g_value_get_int (value);
      break;
    case PROP_PATTERN_CENTER:
      simplevideomarkdetect->pattern_center = g_value_get_double (value);
      break;
    case PROP_PATTERN_SENSITIVITY:
      simplevideomarkdetect->pattern_sensitivity = g_value_get_double (value);
      break;
    case PROP_LEFT_OFFSET:
      simplevideomarkdetect->left_offset = g_value_get_int (value);
      break;
    case PROP_BOTTOM_OFFSET:
      simplevideomarkdetect->bottom_offset = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_detect_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "get_property");

  switch (property_id) {
    case PROP_MESSAGE:
      g_value_set_boolean (value, simplevideomarkdetect->message);
      break;
    case PROP_PATTERN_WIDTH:
      g_value_set_int (value, simplevideomarkdetect->pattern_width);
      break;
    case PROP_PATTERN_HEIGHT:
      g_value_set_int (value, simplevideomarkdetect->pattern_height);
      break;
    case PROP_PATTERN_COUNT:
      g_value_set_int (value, simplevideomarkdetect->pattern_count);
      break;
    case PROP_PATTERN_DATA_COUNT:
      g_value_set_int (value, simplevideomarkdetect->pattern_data_count);
      break;
    case PROP_PATTERN_CENTER:
      g_value_set_double (value, simplevideomarkdetect->pattern_center);
      break;
    case PROP_PATTERN_SENSITIVITY:
      g_value_set_double (value, simplevideomarkdetect->pattern_sensitivity);
      break;
    case PROP_LEFT_OFFSET:
      g_value_set_int (value, simplevideomarkdetect->left_offset);
      break;
    case PROP_BOTTOM_OFFSET:
      g_value_set_int (value, simplevideomarkdetect->bottom_offset);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_detect_dispose (GObject * object)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "dispose");

  /* clean up as much as possible; this may be called multiple times */

  G_OBJECT_CLASS (gst_video_detect_parent_class)->dispose (object);
}

void
gst_video_detect_finalize (GObject * object)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (object);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "finalize");

  /* clean up object here */

  G_OBJECT_CLASS (gst_video_detect_parent_class)->finalize (object);
}

static gboolean
gst_video_detect_start (GstBaseTransform * trans)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (trans);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "start");

  return TRUE;
}

static gboolean
gst_video_detect_stop (GstBaseTransform * trans)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (trans);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "stop");

  return TRUE;
}

static gboolean
gst_video_detect_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (filter);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "set_info");

  return TRUE;
}

static void
gst_video_detect_post_message (GstSimpleVideoMarkDetect * simplevideomarkdetect,
    GstBuffer * buffer, guint64 data)
{
  GstBaseTransform *trans;
  GstMessage *m;
  guint64 duration, timestamp, running_time, stream_time;

  trans = GST_BASE_TRANSFORM_CAST (simplevideomarkdetect);

  /* get timestamps */
  timestamp = GST_BUFFER_TIMESTAMP (buffer);
  duration = GST_BUFFER_DURATION (buffer);
  running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);

  /* post message */
  m = gst_message_new_element (GST_OBJECT_CAST (simplevideomarkdetect),
      gst_structure_new ("GstSimpleVideoMarkDetect",
          "have-pattern", G_TYPE_BOOLEAN, simplevideomarkdetect->in_pattern,
          "timestamp", G_TYPE_UINT64, timestamp,
          "stream-time", G_TYPE_UINT64, stream_time,
          "running-time", G_TYPE_UINT64, running_time,
          "duration", G_TYPE_UINT64, duration,
          "data", G_TYPE_UINT64, data, NULL));
  gst_element_post_message (GST_ELEMENT_CAST (simplevideomarkdetect), m);
}

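/* Compute the average luma of a width x height block of pixels, normalized to
 * the range 0.0 (all black) .. 1.0 (all white). */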
static gdouble
gst_video_detect_calc_brightness (GstSimpleVideoMarkDetect *
    simplevideomarkdetect, guint8 * data, gint width, gint height,
    gint row_stride, gint pixel_stride)
{
  gint i, j;
  guint64 sum;

  sum = 0;
  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      sum += data[pixel_stride * j];
    }
    data += row_stride;
  }
  return sum / (255.0 * width * height);
}

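/* Clip the pattern width so the sampled block stays inside the frame when x is
 * negative or when the block would extend past the right border. */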
static gint
calculate_pw (gint pw, gint x, gint width)
{
  if (x < 0)
    pw += x;
  else if ((x + pw) > width)
    pw = width - x;

  return pw;
}

static void
gst_video_detect_yuv (GstSimpleVideoMarkDetect * simplevideomarkdetect,
    GstVideoFrame * frame)
{
  gdouble brightness;
  gint i, pw, ph, row_stride, pixel_stride;
  gint width, height, offset_calc, x, y;
  guint8 *d;
  guint64 pattern_data;
  gint total_pattern;

  width = frame->info.width;
  height = frame->info.height;

  pw = simplevideomarkdetect->pattern_width;
  ph = simplevideomarkdetect->pattern_height;
  row_stride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
  pixel_stride = GST_VIDEO_FRAME_COMP_PSTRIDE (frame, 0);

  d = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
  /* move to start of bottom left, adjust for offsets */
  offset_calc =
      row_stride * (height - ph - simplevideomarkdetect->bottom_offset) +
      pixel_stride * simplevideomarkdetect->left_offset;
  x = simplevideomarkdetect->left_offset;
  y = height - ph - simplevideomarkdetect->bottom_offset;

  total_pattern =
      simplevideomarkdetect->pattern_count +
      simplevideomarkdetect->pattern_data_count;
  /* If x and y offset values are outside the video, no need to analyze */
  if ((x + (pw * total_pattern)) < 0 || x > width || (y + height) < 0
      || y > height) {
    GST_ERROR_OBJECT (simplevideomarkdetect,
        "simplevideomarkdetect pattern is outside the video. Not Analyzing.");
    return;
  }

  /* Offset calculation less than 0, then reset to 0 */
  if (offset_calc < 0)
    offset_calc = 0;
  /* Y position of mark is negative or pattern exceeds the video height,
     then recalculate pattern height for partial display */
  if (y < 0)
    ph += y;
  else if ((y + ph) > height)
    ph = height - y;
  /* If pattern height is less than 0, need not analyze anything */
  if (ph < 0)
    return;

  /* move to start of bottom left */
  d += offset_calc;

  /* analyze the bottom left pixels */
  for (i = 0; i < simplevideomarkdetect->pattern_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);

    GST_DEBUG_OBJECT (simplevideomarkdetect, "brightness %f", brightness);

    if (i & 1) {
      /* odd pixels must be white, all pixels darker than the center +
       * sensitivity are considered wrong. */
      if (brightness <
          (simplevideomarkdetect->pattern_center +
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    } else {
      /* even pixels must be black, pixels lighter than the center - sensitivity
       * are considered wrong. */
      if (brightness >
          (simplevideomarkdetect->pattern_center -
              simplevideomarkdetect->pattern_sensitivity))
        goto no_pattern;
    }

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (total_pattern - i - 1))) < 0 || x >= width)
      break;
  }
  GST_DEBUG_OBJECT (simplevideomarkdetect, "found pattern");

  pattern_data = 0;

  /* get the data of the pattern */
  for (i = 0; i < simplevideomarkdetect->pattern_data_count; i++) {
    gint draw_pw;
    /* calc brightness of width * height box */
    brightness =
        gst_video_detect_calc_brightness (simplevideomarkdetect, d, pw, ph,
        row_stride, pixel_stride);
    /* update pattern, we just use the center to decide between black and white. */
    pattern_data <<= 1;
    if (brightness > simplevideomarkdetect->pattern_center)
      pattern_data |= 1;

    /* X position of mark is negative or pattern exceeds the video width,
       then recalculate pattern width for partial display */
    draw_pw = calculate_pw (pw, x, width);
    /* If pattern width is less than 0, continue with the next pattern */
    if (draw_pw < 0)
      continue;

    /* move to i-th pattern data */
    d += pixel_stride * draw_pw;
    x += draw_pw;

    if ((x + (pw * (simplevideomarkdetect->pattern_data_count - i - 1))) < 0
        || x >= width)
      break;
  }

  GST_DEBUG_OBJECT (simplevideomarkdetect, "have data %" G_GUINT64_FORMAT,
      pattern_data);

  simplevideomarkdetect->in_pattern = TRUE;
  gst_video_detect_post_message (simplevideomarkdetect, frame->buffer,
      pattern_data);

  return;

no_pattern:
  {
    GST_DEBUG_OBJECT (simplevideomarkdetect, "no pattern found");
    if (simplevideomarkdetect->in_pattern) {
      simplevideomarkdetect->in_pattern = FALSE;
      gst_video_detect_post_message (simplevideomarkdetect, frame->buffer, 0);
    }
    return;
  }
}

static GstFlowReturn
gst_video_detect_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstSimpleVideoMarkDetect *simplevideomarkdetect =
      GST_SIMPLE_VIDEO_MARK_DETECT (filter);

  GST_DEBUG_OBJECT (simplevideomarkdetect, "transform_frame_ip");

  gst_video_detect_yuv (simplevideomarkdetect, frame);

  return GST_FLOW_OK;
}
