/* GStreamer
 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
 * Copyright (C) <2009> Sebastian Dröge <sebastian.droege@collabora.co.uk>
 *
 * EffecTV:
 * Copyright (C) 2001-2002 FUKUCHI Kentarou
 *
 * EdgeTV - detects edges and displays them in good old computer way
 *
 * EffecTV is free software. This library is free software;
 * you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-edgetv
 * @title: edgetv
 *
 * EdgeTV detects edges and displays them in the good old low-resolution
 * computer way.
 *
 * ## Example launch line
 * |[
 * gst-launch-1.0 -v videotestsrc ! edgetv ! videoconvert ! autovideosink
 * ]| This pipeline shows the effect of edgetv on a test stream.
 *
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <string.h>

#include "gstedge.h"
#include "gsteffectv.h"

#define gst_edgetv_parent_class parent_class
G_DEFINE_TYPE (GstEdgeTV, gst_edgetv, GST_TYPE_VIDEO_FILTER);
GST_ELEMENT_REGISTER_DEFINE (edgetv, "edgetv", GST_RANK_NONE, GST_TYPE_EDGETV);

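/* Pick pixel formats whose padding byte sits in the most significant byte of
 * each 32-bit word, so the per-channel masks used in the transform below work
 * regardless of whether red or blue ends up in the low byte. */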
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ BGRx, RGBx }")
#else
#define CAPS_STR GST_VIDEO_CAPS_MAKE ("{ xBGR, xRGB }")
#endif

static GstStaticPadTemplate gst_edgetv_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS_STR)
    );

static GstStaticPadTemplate gst_edgetv_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (CAPS_STR)
    );

static gboolean
gst_edgetv_set_info (GstVideoFilter * filter, GstCaps * incaps,
    GstVideoInfo * in_info, GstCaps * outcaps, GstVideoInfo * out_info)
{
  GstEdgeTV *edgetv = GST_EDGETV (filter);
  guint map_size;
  gint width, height;

  width = GST_VIDEO_INFO_WIDTH (in_info);
  height = GST_VIDEO_INFO_HEIGHT (in_info);

  edgetv->map_width = width / 4;
  edgetv->map_height = height / 4;
  edgetv->video_width_margin = width % 4;

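  /* The effect works on 4x4 pixel blocks: the edge map is kept at a quarter
   * of the video resolution, with two guint32 entries per block (the edge
   * strengths against the left and the upper neighbour).  video_width_margin
   * absorbs widths that are not a multiple of 4. */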
  map_size = edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2;

  g_free (edgetv->map);
  edgetv->map = (guint32 *) g_malloc0 (map_size);

  return TRUE;
}

static GstFlowReturn
gst_edgetv_transform_frame (GstVideoFilter * vfilter, GstVideoFrame * in_frame,
    GstVideoFrame * out_frame)
{
  GstEdgeTV *filter = GST_EDGETV (vfilter);
  gint x, y, r, g, b;
  guint32 *src, *dest;
  guint32 p, q;
  guint32 v0, v1, v2, v3;
  gint width, map_height, map_width;
  gint video_width_margin;
  guint32 *map;
  GstFlowReturn ret = GST_FLOW_OK;

  map = filter->map;
  map_height = filter->map_height;
  map_width = filter->map_width;
  video_width_margin = filter->video_width_margin;

  src = GST_VIDEO_FRAME_PLANE_DATA (in_frame, 0);
  dest = GST_VIDEO_FRAME_PLANE_DATA (out_frame, 0);

  width = GST_VIDEO_FRAME_WIDTH (in_frame);

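  /* Start at the second 4x4 block in both dimensions: the loops below begin
   * at x = 1, y = 1 because every block is compared against its left and
   * upper neighbour. */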
  src += width * 4 + 4;
  dest += width * 4 + 4;

  for (y = 1; y < map_height - 1; y++) {
    for (x = 1; x < map_width - 1; x++) {
      p = *src;
      q = *(src - 4);

      /* difference between the current pixel and left neighbor. */
      r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
      g = ((p & 0xff00) - (q & 0xff00)) >> 8;
      b = (p & 0xff) - (q & 0xff);
      r *= r;
      g *= g;
      b *= b;
      r = r >> 5;               /* Drop the lowest bit for saturated addition: */
      g = g >> 5;               /* divide r and g by 32 instead of 16. This is */
      b = b >> 4;               /* the same as `v2 &= 0xfefeff'. */
      if (r > 127)
        r = 127;
      if (g > 127)
        g = 127;
      if (b > 255)
        b = 255;
      v2 = (r << 17) | (g << 9) | b;

      /* difference between the current pixel and upper neighbor. */
      q = *(src - width * 4);
      r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
      g = ((p & 0xff00) - (q & 0xff00)) >> 8;
      b = (p & 0xff) - (q & 0xff);
      r *= r;
      g *= g;
      b *= b;
      r = r >> 5;
      g = g >> 5;
      b = b >> 4;
      if (r > 127)
        r = 127;
      if (g > 127)
        g = 127;
      if (b > 255)
        b = 255;
      v3 = (r << 17) | (g << 9) | b;

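      /* v2 and v3 are this block's edge strengths against its left and upper
       * neighbour; v0 and v1 are the values stored earlier by the block above
       * and the block to the left. */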
      v0 = map[(y - 1) * map_width * 2 + x * 2];
      v1 = map[y * map_width * 2 + (x - 1) * 2 + 1];
      map[y * map_width * 2 + x * 2] = v2;
      map[y * map_width * 2 + x * 2 + 1] = v3;
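      /* Paint the 4x4 destination block from combinations of the neighbour
       * and current edge values.  `r | (g - (g >> 8))' is a per-channel
       * saturated add: g catches the carry bits and turns any overflowing
       * channel into 0xff. */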
      r = v0 + v1;
      g = r & 0x01010100;
      dest[0] = r | (g - (g >> 8));
      r = v0 + v3;
      g = r & 0x01010100;
      dest[1] = r | (g - (g >> 8));
      dest[2] = v3;
      dest[3] = v3;
      r = v2 + v1;
      g = r & 0x01010100;
      dest[width] = r | (g - (g >> 8));
      r = v2 + v3;
      g = r & 0x01010100;
      dest[width + 1] = r | (g - (g >> 8));
      dest[width + 2] = v3;
      dest[width + 3] = v3;
      dest[width * 2] = v2;
      dest[width * 2 + 1] = v2;
      dest[width * 2 + 2] = 0;
      dest[width * 2 + 3] = 0;
      dest[width * 3] = v2;
      dest[width * 3 + 1] = v2;
      dest[width * 3 + 2] = 0;
      dest[width * 3 + 3] = 0;

      src += 4;
      dest += 4;
    }
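    /* Jump to x = 1 of the next 4x4 block row: three more pixel rows plus the
     * remaining block columns, plus any width remainder. */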
    src += width * 3 + 8 + video_width_margin;
    dest += width * 3 + 8 + video_width_margin;
  }

  return ret;
}

static gboolean
gst_edgetv_start (GstBaseTransform * trans)
{
  GstEdgeTV *edgetv = GST_EDGETV (trans);

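  /* Zero the edge map so data from a previous run does not bleed into the
   * first frames of a new stream. */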
  if (edgetv->map)
    memset (edgetv->map, 0,
        edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
  return TRUE;
}

static void
gst_edgetv_finalize (GObject * object)
{
  GstEdgeTV *edgetv = GST_EDGETV (object);

  g_free (edgetv->map);
  edgetv->map = NULL;

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

static void
gst_edgetv_class_init (GstEdgeTVClass * klass)
{
  GObjectClass *gobject_class = (GObjectClass *) klass;
  GstElementClass *gstelement_class = (GstElementClass *) klass;
  GstBaseTransformClass *trans_class = (GstBaseTransformClass *) klass;
  GstVideoFilterClass *vfilter_class = (GstVideoFilterClass *) klass;

  gobject_class->finalize = gst_edgetv_finalize;

  gst_element_class_set_static_metadata (gstelement_class, "EdgeTV effect",
      "Filter/Effect/Video",
      "Apply edge detection on video", "Wim Taymans <wim.taymans@chello.be>");

  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_edgetv_sink_template);
  gst_element_class_add_static_pad_template (gstelement_class,
      &gst_edgetv_src_template);

  trans_class->start = GST_DEBUG_FUNCPTR (gst_edgetv_start);

  vfilter_class->set_info = GST_DEBUG_FUNCPTR (gst_edgetv_set_info);
  vfilter_class->transform_frame =
      GST_DEBUG_FUNCPTR (gst_edgetv_transform_frame);
}

static void
gst_edgetv_init (GstEdgeTV * edgetv)
{
}
