/* GStreamer
 * Copyright (C) <2006> Wim Taymans <wim@fluendo.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
 * Boston, MA 02110-1335, USA.
 */
/**
 * SECTION:element-videoanalyse
 * @title: videoanalyse
 *
 * This plugin analyses every video frame and if the #GstVideoAnalyse:message
 * property is %TRUE, posts an element message with video statistics called
 * `GstVideoAnalyse`.
 *
 * The message's structure contains these fields:
 *
 * * #GstClockTime `timestamp`: the timestamp of the buffer that triggered the message.
 *
 * * #GstClockTime `stream-time`: the stream time of the buffer.
 *
 * * #GstClockTime `running-time`: the running_time of the buffer.
 *
 * * #GstClockTime `duration`: the duration of the buffer.
 *
 * * #gdouble `luma-average`: the average brightness of the frame. Range: 0.0-1.0
 *
 * * #gdouble `luma-variance`: the brightness variance of the frame.
 *
 * ## Example launch line
 * |[
 * gst-launch-1.0 -m videotestsrc ! videoanalyse ! videoconvert ! ximagesink
 * ]| This pipeline emits messages to the console for each frame that has been analysed.
 *
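 * An application can watch the pipeline bus for these messages. A minimal
 * sketch (the callback name is illustrative and is assumed to have been
 * attached with gst_bus_add_watch()):
 *
 * |[<!-- language="C" -->
 * static gboolean
 * on_bus_message (GstBus * bus, GstMessage * message, gpointer user_data)
 * {
 *   // sketch: react only to the element message documented above
 *   if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT) {
 *     const GstStructure *s = gst_message_get_structure (message);
 *
 *     if (s != NULL && gst_structure_has_name (s, "GstVideoAnalyse")) {
 *       gdouble average = 0.0, variance = 0.0;
 *
 *       gst_structure_get_double (s, "luma-average", &average);
 *       gst_structure_get_double (s, "luma-variance", &variance);
 *       g_print ("luma-average %f, luma-variance %f\n", average, variance);
 *     }
 *   }
 *   return TRUE;
 * }
 * ]|
 *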
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideofilter.h>
#include "gstvideoanalyse.h"

GST_DEBUG_CATEGORY_STATIC (gst_video_analyse_debug_category);
#define GST_CAT_DEFAULT gst_video_analyse_debug_category

/* prototypes */


static void gst_video_analyse_set_property (GObject * object,
    guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_video_analyse_get_property (GObject * object,
    guint property_id, GValue * value, GParamSpec * pspec);
static void gst_video_analyse_finalize (GObject * object);

static GstFlowReturn gst_video_analyse_transform_frame_ip (GstVideoFilter *
    filter, GstVideoFrame * frame);

enum
{
  PROP_0,
  PROP_MESSAGE
};

#define DEFAULT_MESSAGE TRUE

#define VIDEO_CAPS \
  GST_VIDEO_CAPS_MAKE("{ I420, YV12, Y444, Y42B, Y41B }")


/* class initialization */

G_DEFINE_TYPE_WITH_CODE (GstVideoAnalyse, gst_video_analyse,
    GST_TYPE_VIDEO_FILTER,
    GST_DEBUG_CATEGORY_INIT (gst_video_analyse_debug_category, "videoanalyse",
        0, "debug category for videoanalyse element"));
GST_ELEMENT_REGISTER_DEFINE (videoanalyse, "videoanalyse",
    GST_RANK_NONE, GST_TYPE_VIDEO_ANALYSE);

static void
gst_video_analyse_class_init (GstVideoAnalyseClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstVideoFilterClass *video_filter_class = GST_VIDEO_FILTER_CLASS (klass);

  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));
  gst_element_class_add_pad_template (GST_ELEMENT_CLASS (klass),
      gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
          gst_caps_from_string (VIDEO_CAPS)));

  gst_element_class_set_static_metadata (GST_ELEMENT_CLASS (klass),
      "Video analyser", "Filter/Analyzer/Video",
      "Analyse video signal", "Wim Taymans <wim@fluendo.com>");

  gobject_class->set_property = gst_video_analyse_set_property;
  gobject_class->get_property = gst_video_analyse_get_property;
  gobject_class->finalize = gst_video_analyse_finalize;
  video_filter_class->transform_frame_ip =
      GST_DEBUG_FUNCPTR (gst_video_analyse_transform_frame_ip);

  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_MESSAGE,
      g_param_spec_boolean ("message", "Message",
          "Post statistics messages",
          DEFAULT_MESSAGE,
          G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
  //trans_class->passthrough_on_same_caps = TRUE;
}

static void
gst_video_analyse_init (GstVideoAnalyse * videoanalyse)
{
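  /* nothing to do here: the "message" property is initialised to its default
   * via G_PARAM_CONSTRUCT in class_init */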
}

void
gst_video_analyse_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);

  GST_DEBUG_OBJECT (videoanalyse, "set_property");

  switch (property_id) {
    case PROP_MESSAGE:
      videoanalyse->message = g_value_get_boolean (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_analyse_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);

  GST_DEBUG_OBJECT (videoanalyse, "get_property");

  switch (property_id) {
    case PROP_MESSAGE:
      g_value_set_boolean (value, videoanalyse->message);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
      break;
  }
}

void
gst_video_analyse_finalize (GObject * object)
{
  GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (object);

  GST_DEBUG_OBJECT (videoanalyse, "finalize");

  /* clean up object here */

  G_OBJECT_CLASS (gst_video_analyse_parent_class)->finalize (object);
}

static void
gst_video_analyse_post_message (GstVideoAnalyse * videoanalyse,
    GstVideoFrame * frame)
{
  GstBaseTransform *trans;
  GstMessage *m;
  guint64 duration, timestamp, running_time, stream_time;

  trans = GST_BASE_TRANSFORM_CAST (videoanalyse);

  /* get timestamps */
  timestamp = GST_BUFFER_TIMESTAMP (frame->buffer);
  duration = GST_BUFFER_DURATION (frame->buffer);
  running_time = gst_segment_to_running_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);
  stream_time = gst_segment_to_stream_time (&trans->segment, GST_FORMAT_TIME,
      timestamp);

  m = gst_message_new_element (GST_OBJECT_CAST (videoanalyse),
      gst_structure_new ("GstVideoAnalyse",
          "timestamp", G_TYPE_UINT64, timestamp,
          "stream-time", G_TYPE_UINT64, stream_time,
          "running-time", G_TYPE_UINT64, running_time,
          "duration", G_TYPE_UINT64, duration,
          "luma-average", G_TYPE_DOUBLE, videoanalyse->luma_average,
          "luma-variance", G_TYPE_DOUBLE, videoanalyse->luma_variance, NULL));

  gst_element_post_message (GST_ELEMENT_CAST (videoanalyse), m);
}

static void
gst_video_analyse_planar (GstVideoAnalyse * videoanalyse, GstVideoFrame * frame)
{
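  /* all formats accepted by VIDEO_CAPS are planar YUV, so plane 0 is the
   * luma (Y) plane; the statistics below are computed over luma only */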
  guint64 sum;
  gint avg, diff;
  gint i, j;
  guint8 *d;
  gint width = frame->info.width;
  gint height = frame->info.height;
  gint stride;

  d = frame->data[0];
  stride = frame->info.stride[0];
  sum = 0;
  /* do brightness as average of pixel brightness in 0.0 to 1.0 */
  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      sum += d[j];
    }
    d += stride;
  }
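  /* integer mean of the luma plane, reused below for the variance pass */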
  avg = sum / (width * height);
  videoanalyse->luma_average = sum / (255.0 * width * height);

  d = frame->data[0];
  stride = frame->info.stride[0];
  sum = 0;
  /* do variance */
  for (i = 0; i < height; i++) {
    for (j = 0; j < width; j++) {
      diff = (avg - d[j]);
      sum += diff * diff;
    }
    d += stride;
  }
  videoanalyse->luma_variance = sum / (255.0 * 255.0 * width * height);
}

static GstFlowReturn
gst_video_analyse_transform_frame_ip (GstVideoFilter * filter,
    GstVideoFrame * frame)
{
  GstVideoAnalyse *videoanalyse = GST_VIDEO_ANALYSE (filter);

  GST_DEBUG_OBJECT (videoanalyse, "transform_frame_ip");

  gst_video_analyse_planar (videoanalyse, frame);

  if (videoanalyse->message)
    gst_video_analyse_post_message (videoanalyse, frame);

  return GST_FLOW_OK;
}