1 /*
2  * GStreamer
3  * Copyright (C) 2011 Robert Swain <robert.swain@collabora.co.uk>
4  *
5  * Permission is hereby granted, free of charge, to any person obtaining a
6  * copy of this software and associated documentation files (the "Software"),
7  * to deal in the Software without restriction, including without limitation
8  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9  * and/or sell copies of the Software, and to permit persons to whom the
10  * Software is furnished to do so, subject to the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included in
13  * all copies or substantial portions of the Software.
14  *
15  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18  * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19  * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20  * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
21  * DEALINGS IN THE SOFTWARE.
22  *
23  * Alternatively, the contents of this file may be used under the
24  * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
25  * which case the following provisions apply instead of the ones
26  * mentioned above:
27  *
28  * This library is free software; you can redistribute it and/or
29  * modify it under the terms of the GNU Library General Public
30  * License as published by the Free Software Foundation; either
31  * version 2 of the License, or (at your option) any later version.
32  *
33  * This library is distributed in the hope that it will be useful,
34  * but WITHOUT ANY WARRANTY; without even the implied warranty of
35  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
36  * Library General Public License for more details.
37  *
38  * You should have received a copy of the GNU Library General Public
39  * License along with this library; if not, write to the
40  * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
41  * Boston, MA 02110-1301, USA.
42  */
43 
44 /**
45  * SECTION:element-fieldanalysis
46  * @title: fieldanalysis
47  *
48  * Analyse fields from video buffers to identify whether the buffers are
49  * progressive/telecined/interlaced and, if telecined, the telecine pattern
50  * used.
51  *
52  * ## Example launch line
53  * |[
54  * gst-launch-1.0 -v uridecodebin uri=/path/to/foo.bar ! fieldanalysis ! deinterlace ! videoconvert ! autovideosink
55  * ]| This pipeline will analyse a video stream with default metrics and thresholds and output progressive frames.
56  *
57  */
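/* An illustrative variation of the launch line above (not from the original
 * documentation): the windowed comb detection frame metric can be selected by
 * its enum nick, which is registered further down in this file:
 *
 *   gst-launch-1.0 -v uridecodebin uri=/path/to/foo.bar ! \
 *       fieldanalysis frame-metric=windowed-comb ! deinterlace ! \
 *       videoconvert ! autovideosink
 */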
58 
59 #ifdef HAVE_CONFIG_H
60 #  include <config.h>
61 #endif
62 
63 #include <gst/gst.h>
64 #include <gst/video/video.h>
65 #include <string.h>
66 #include <stdlib.h>             /* for abs() */
67 
68 #include "gstfieldanalysis.h"
69 #include "gstfieldanalysisorc.h"
70 
71 GST_DEBUG_CATEGORY_STATIC (gst_field_analysis_debug);
72 #define GST_CAT_DEFAULT gst_field_analysis_debug
73 
74 #define DEFAULT_FIELD_METRIC GST_FIELDANALYSIS_SSD
75 #define DEFAULT_FRAME_METRIC GST_FIELDANALYSIS_5_TAP
76 #define DEFAULT_NOISE_FLOOR 16
77 #define DEFAULT_FIELD_THRESH 0.08f
78 #define DEFAULT_FRAME_THRESH 0.002f
79 #define DEFAULT_COMB_METHOD METHOD_5_TAP
80 #define DEFAULT_SPATIAL_THRESH 9
81 #define DEFAULT_BLOCK_WIDTH 16
82 #define DEFAULT_BLOCK_HEIGHT 16
83 #define DEFAULT_BLOCK_THRESH 80
84 #define DEFAULT_IGNORED_LINES 2
85 
86 enum
87 {
88   PROP_0,
89   PROP_FIELD_METRIC,
90   PROP_FRAME_METRIC,
91   PROP_NOISE_FLOOR,
92   PROP_FIELD_THRESH,
93   PROP_FRAME_THRESH,
94   PROP_COMB_METHOD,
95   PROP_SPATIAL_THRESH,
96   PROP_BLOCK_WIDTH,
97   PROP_BLOCK_HEIGHT,
98   PROP_BLOCK_THRESH,
99   PROP_IGNORED_LINES
100 };
101 
102 static GstStaticPadTemplate sink_factory =
103 GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
104     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{YUY2,UYVY,Y42B,I420,YV12}")));
105 
106 static GstStaticPadTemplate src_factory =
107 GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
108     GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{YUY2,UYVY,Y42B,I420,YV12}")));
109 
110 G_DEFINE_TYPE (GstFieldAnalysis, gst_field_analysis, GST_TYPE_ELEMENT);
111 GST_ELEMENT_REGISTER_DEFINE_WITH_CODE (fieldanalysis, "fieldanalysis",
112     GST_RANK_NONE, GST_TYPE_FIELDANALYSIS,
113     GST_DEBUG_CATEGORY_INIT (gst_field_analysis_debug, "fieldanalysis", 0,
114         "Video field analysis");
115     );
116 
117 #define parent_class gst_field_analysis_parent_class
118 
119 static void gst_field_analysis_set_property (GObject * object, guint prop_id,
120     const GValue * value, GParamSpec * pspec);
121 static void gst_field_analysis_get_property (GObject * object, guint prop_id,
122     GValue * value, GParamSpec * pspec);
123 
124 static gboolean gst_field_analysis_sink_event (GstPad * pad, GstObject * parent,
125     GstEvent * event);
126 static GstFlowReturn gst_field_analysis_chain (GstPad * pad, GstObject * parent,
127     GstBuffer * buf);
128 static GstStateChangeReturn gst_field_analysis_change_state (GstElement *
129     element, GstStateChange transition);
130 static void gst_field_analysis_finalize (GObject * self);
131 
132 static GQueue *gst_field_analysis_flush_frames (GstFieldAnalysis * filter);
133 
134 typedef enum
135 {
136   GST_FIELDANALYSIS_SAD,
137   GST_FIELDANALYSIS_SSD,
138   GST_FIELDANALYSIS_3_TAP
139 } GstFieldAnalysisFieldMetric;
140 
141 #define GST_TYPE_FIELDANALYSIS_FIELD_METRIC (gst_fieldanalysis_field_metric_get_type())
142 static GType
143 gst_fieldanalysis_field_metric_get_type (void)
144 {
145   static GType fieldanalysis_field_metric_type = 0;
146 
147   if (!fieldanalysis_field_metric_type) {
148     static const GEnumValue fieldanalysis_field_metrics[] = {
149       {GST_FIELDANALYSIS_SAD, "Sum of Absolute Differences", "sad"},
150       {GST_FIELDANALYSIS_SSD, "Sum of Squared Differences", "ssd"},
151       {GST_FIELDANALYSIS_3_TAP, "Difference of 3-tap [1,4,1] Horizontal Filter",
152           "3-tap"},
153       {0, NULL, NULL},
154     };
155 
156     fieldanalysis_field_metric_type =
157         g_enum_register_static ("GstFieldAnalysisFieldMetric",
158         fieldanalysis_field_metrics);
159   }
160 
161   return fieldanalysis_field_metric_type;
162 }
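/* As with any registered GEnum, the nicks above ("sad", "ssd", "3-tap") are
 * the strings accepted for the field-metric property by gst-launch-1.0 and
 * gst_util_set_object_arg (); g_object_set () takes the numeric values. */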
163 
164 typedef enum
165 {
166   GST_FIELDANALYSIS_5_TAP,
167   GST_FIELDANALYSIS_WINDOWED_COMB
168 } GstFieldAnalysisFrameMetric;
169 
170 #define GST_TYPE_FIELDANALYSIS_FRAME_METRIC (gst_fieldanalysis_frame_metric_get_type())
171 static GType
172 gst_fieldanalysis_frame_metric_get_type (void)
173 {
174   static GType fieldanalysis_frame_metric_type = 0;
175 
176   if (!fieldanalysis_frame_metric_type) {
177     static const GEnumValue fieldanalysis_frame_metrics[] = {
178       {GST_FIELDANALYSIS_5_TAP, "5-tap [1,-3,4,-3,1] Vertical Filter", "5-tap"},
179       {GST_FIELDANALYSIS_WINDOWED_COMB,
180             "Windowed Comb Detection (not optimised)",
181           "windowed-comb"},
182       {0, NULL, NULL},
183     };
184 
185     fieldanalysis_frame_metric_type =
186         g_enum_register_static ("GstFieldAnalysisFrameMetric",
187         fieldanalysis_frame_metrics);
188   }
189 
190   return fieldanalysis_frame_metric_type;
191 }
192 
193 #define GST_TYPE_FIELDANALYSIS_COMB_METHOD (gst_fieldanalysis_comb_method_get_type())
194 static GType
195 gst_fieldanalysis_comb_method_get_type (void)
196 {
197   static GType fieldanalysis_comb_method_type = 0;
198 
199   if (!fieldanalysis_comb_method_type) {
200     static const GEnumValue fieldanalysis_comb_methods[] = {
201       {METHOD_32DETECT,
202             "Difference to above sample in same field small and difference to sample in other field large",
203           "32-detect"},
204       {METHOD_IS_COMBED,
205             "Differences between current sample and the above/below samples in other field multiplied together, larger than squared spatial threshold (from Tritical's isCombed)",
206           "isCombed"},
207       {METHOD_5_TAP,
208             "5-tap [1,-3,4,-3,1] vertical filter result is larger than spatial threshold*6",
209           "5-tap"},
210       {0, NULL, NULL},
211     };
212 
213     fieldanalysis_comb_method_type =
214         g_enum_register_static ("FieldAnalysisCombMethod",
215         fieldanalysis_comb_methods);
216   }
217 
218   return fieldanalysis_comb_method_type;
219 }
220 
221 static void
222 gst_field_analysis_class_init (GstFieldAnalysisClass * klass)
223 {
224   GObjectClass *gobject_class;
225   GstElementClass *gstelement_class;
226 
227   gobject_class = (GObjectClass *) klass;
228   gstelement_class = (GstElementClass *) klass;
229 
230   gobject_class->set_property = gst_field_analysis_set_property;
231   gobject_class->get_property = gst_field_analysis_get_property;
232   gobject_class->finalize = gst_field_analysis_finalize;
233 
234   g_object_class_install_property (gobject_class, PROP_FIELD_METRIC,
235       g_param_spec_enum ("field-metric", "Field Metric",
236           "Metric to be used for comparing same parity fields to decide if they are a repeated field for telecine",
237           GST_TYPE_FIELDANALYSIS_FIELD_METRIC, DEFAULT_FIELD_METRIC,
238           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
239   g_object_class_install_property (gobject_class, PROP_FRAME_METRIC,
240       g_param_spec_enum ("frame-metric", "Frame Metric",
241           "Metric to be used for comparing opposite parity fields to decide if they are a progressive frame",
242           GST_TYPE_FIELDANALYSIS_FRAME_METRIC, DEFAULT_FRAME_METRIC,
243           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
244   g_object_class_install_property (gobject_class, PROP_NOISE_FLOOR,
245       g_param_spec_uint ("noise-floor", "Noise Floor",
246           "Noise floor for appropriate metrics (per-pixel metric values with a score less than this will be ignored)",
247           0, G_MAXUINT32,
248           DEFAULT_NOISE_FLOOR, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
249   g_object_class_install_property (gobject_class, PROP_FIELD_THRESH,
250       g_param_spec_float ("field-threshold", "Field Threshold",
251           "Threshold for field metric decisions", 0.0f, G_MAXFLOAT,
252           DEFAULT_FIELD_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
253   g_object_class_install_property (gobject_class, PROP_FRAME_THRESH,
254       g_param_spec_float ("frame-threshold", "Frame Threshold",
255           "Threshold for frame metric decisions", 0.0f, G_MAXFLOAT,
256           DEFAULT_FRAME_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
257   g_object_class_install_property (gobject_class, PROP_COMB_METHOD,
258       g_param_spec_enum ("comb-method", "Comb-detection Method",
259           "Metric to be used for identifying comb artifacts if using windowed comb detection",
260           GST_TYPE_FIELDANALYSIS_COMB_METHOD, DEFAULT_COMB_METHOD,
261           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
262   g_object_class_install_property (gobject_class, PROP_SPATIAL_THRESH,
263       g_param_spec_int64 ("spatial-threshold", "Spatial Combing Threshold",
264           "Threshold for combing metric decisions", 0, G_MAXINT64,
265           DEFAULT_SPATIAL_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
266   g_object_class_install_property (gobject_class, PROP_BLOCK_WIDTH,
267       g_param_spec_uint64 ("block-width", "Block width",
268           "Block width for windowed comb detection", 1, G_MAXUINT64,
269           DEFAULT_BLOCK_WIDTH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
270   g_object_class_install_property (gobject_class, PROP_BLOCK_HEIGHT,
271       g_param_spec_uint64 ("block-height", "Block height",
272           "Block height for windowed comb detection", 0, G_MAXUINT64,
273           DEFAULT_BLOCK_HEIGHT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
274   g_object_class_install_property (gobject_class, PROP_BLOCK_THRESH,
275       g_param_spec_uint64 ("block-threshold", "Block threshold",
276           "Block threshold for windowed comb detection", 0, G_MAXUINT64,
277           DEFAULT_BLOCK_THRESH, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
278   g_object_class_install_property (gobject_class, PROP_IGNORED_LINES,
279       g_param_spec_uint64 ("ignored-lines", "Ignored lines",
280           "Ignore this many lines from the top and bottom for windowed comb detection",
281           2, G_MAXUINT64, DEFAULT_IGNORED_LINES,
282           G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
283 
284   gstelement_class->change_state =
285       GST_DEBUG_FUNCPTR (gst_field_analysis_change_state);
286 
287   gst_element_class_set_static_metadata (gstelement_class,
288       "Video field analysis",
289       "Filter/Analysis/Video",
290       "Analyse fields from video frames to identify if they are progressive/telecined/interlaced",
291       "Robert Swain <robert.swain@collabora.co.uk>");
292 
293   gst_element_class_add_static_pad_template (gstelement_class, &src_factory);
294   gst_element_class_add_static_pad_template (gstelement_class, &sink_factory);
295 
296   gst_type_mark_as_plugin_api (GST_TYPE_FIELDANALYSIS_COMB_METHOD, 0);
297   gst_type_mark_as_plugin_api (GST_TYPE_FIELDANALYSIS_FIELD_METRIC, 0);
298   gst_type_mark_as_plugin_api (GST_TYPE_FIELDANALYSIS_FRAME_METRIC, 0);
299 }
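/* Illustrative application-side configuration (not part of the original file);
 * note that the 64-bit properties must be passed as 64-bit values through
 * g_object_set ()'s varargs:
 *
 *   GstElement *fa = gst_element_factory_make ("fieldanalysis", NULL);
 *   gst_util_set_object_arg (G_OBJECT (fa), "comb-method", "32-detect");
 *   g_object_set (fa, "block-width", (guint64) 32,
 *       "block-threshold", (guint64) 100, NULL);
 */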
300 
301 static gfloat same_parity_sad (GstFieldAnalysis * filter,
302     FieldAnalysisFields (*history)[2]);
303 static gfloat same_parity_ssd (GstFieldAnalysis * filter,
304     FieldAnalysisFields (*history)[2]);
305 static gfloat same_parity_3_tap (GstFieldAnalysis * filter,
306     FieldAnalysisFields (*history)[2]);
307 static gfloat opposite_parity_5_tap (GstFieldAnalysis * filter,
308     FieldAnalysisFields (*history)[2]);
309 static guint64 block_score_for_row_32detect (GstFieldAnalysis * filter,
310     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
311 static guint64 block_score_for_row_iscombed (GstFieldAnalysis * filter,
312     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
313 static guint64 block_score_for_row_5_tap (GstFieldAnalysis * filter,
314     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1);
315 static gfloat opposite_parity_windowed_comb (GstFieldAnalysis * filter,
316     FieldAnalysisFields (*history)[2]);
317 
318 static void
319 gst_field_analysis_clear_frames (GstFieldAnalysis * filter)
320 {
321   GST_DEBUG_OBJECT (filter, "Clearing %d frames", filter->nframes);
322   while (filter->nframes) {
323     gst_video_frame_unmap (&filter->frames[filter->nframes - 1].frame);
324     filter->nframes--;
325   }
326 }
327 
328 static void
329 gst_field_analysis_reset (GstFieldAnalysis * filter)
330 {
331   gst_field_analysis_clear_frames (filter);
332   GST_DEBUG_OBJECT (filter, "Resetting context");
333   memset (filter->frames, 0, 2 * sizeof (FieldAnalysisHistory));
334   filter->is_telecine = FALSE;
335   filter->first_buffer = TRUE;
336   gst_video_info_init (&filter->vinfo);
337   g_free (filter->comb_mask);
338   filter->comb_mask = NULL;
339   g_free (filter->block_scores);
340   filter->block_scores = NULL;
341 }
342 
343 static void
344 gst_field_analysis_init (GstFieldAnalysis * filter)
345 {
346   filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
347   gst_pad_set_event_function (filter->sinkpad,
348       GST_DEBUG_FUNCPTR (gst_field_analysis_sink_event));
349   gst_pad_set_chain_function (filter->sinkpad,
350       GST_DEBUG_FUNCPTR (gst_field_analysis_chain));
351 
352   filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
353 
354   gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
355   gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
356 
357   filter->nframes = 0;
358   gst_field_analysis_reset (filter);
359   filter->same_field = &same_parity_ssd;
360   filter->field_thresh = DEFAULT_FIELD_THRESH;
361   filter->same_frame = &opposite_parity_5_tap;
362   filter->frame_thresh = DEFAULT_FRAME_THRESH;
363   filter->noise_floor = DEFAULT_NOISE_FLOOR;
364   filter->block_score_for_row = &block_score_for_row_5_tap;
365   filter->spatial_thresh = DEFAULT_SPATIAL_THRESH;
366   filter->block_width = DEFAULT_BLOCK_WIDTH;
367   filter->block_height = DEFAULT_BLOCK_HEIGHT;
368   filter->block_thresh = DEFAULT_BLOCK_THRESH;
369   filter->ignored_lines = DEFAULT_IGNORED_LINES;
370 }
371 
372 static void
373 gst_field_analysis_set_property (GObject * object, guint prop_id,
374     const GValue * value, GParamSpec * pspec)
375 {
376   GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);
377 
378   switch (prop_id) {
379     case PROP_FIELD_METRIC:
380       switch (g_value_get_enum (value)) {
381         case GST_FIELDANALYSIS_SAD:
382           filter->same_field = &same_parity_sad;
383           break;
384         case GST_FIELDANALYSIS_SSD:
385           filter->same_field = &same_parity_ssd;
386           break;
387         case GST_FIELDANALYSIS_3_TAP:
388           filter->same_field = &same_parity_3_tap;
389           break;
390         default:
391           break;
392       }
393       break;
394     case PROP_FRAME_METRIC:
395       switch (g_value_get_enum (value)) {
396         case GST_FIELDANALYSIS_5_TAP:
397           filter->same_frame = &opposite_parity_5_tap;
398           break;
399         case GST_FIELDANALYSIS_WINDOWED_COMB:
400           filter->same_frame = &opposite_parity_windowed_comb;
401           break;
402         default:
403           break;
404       }
405       break;
406     case PROP_NOISE_FLOOR:
407       filter->noise_floor = g_value_get_uint (value);
408       break;
409     case PROP_FIELD_THRESH:
410       filter->field_thresh = g_value_get_float (value);
411       break;
412     case PROP_FRAME_THRESH:
413       filter->frame_thresh = g_value_get_float (value);
414       break;
415     case PROP_COMB_METHOD:
416       switch (g_value_get_enum (value)) {
417         case METHOD_32DETECT:
418           filter->block_score_for_row = &block_score_for_row_32detect;
419           break;
420         case METHOD_IS_COMBED:
421           filter->block_score_for_row = &block_score_for_row_iscombed;
422           break;
423         case METHOD_5_TAP:
424           filter->block_score_for_row = &block_score_for_row_5_tap;
425           break;
426         default:
427           break;
428       }
429       break;
430     case PROP_SPATIAL_THRESH:
431       filter->spatial_thresh = g_value_get_int64 (value);
432       break;
433     case PROP_BLOCK_WIDTH:
434       filter->block_width = g_value_get_uint64 (value);
435       if (GST_VIDEO_FRAME_WIDTH (&filter->frames[0].frame)) {
436         const gint frame_width =
437             GST_VIDEO_FRAME_WIDTH (&filter->frames[0].frame);
438         if (filter->block_scores) {
439           gsize nbytes = (frame_width / filter->block_width) * sizeof (guint);
440           filter->block_scores = g_realloc (filter->block_scores, nbytes);
441           memset (filter->block_scores, 0, nbytes);
442         } else {
443           filter->block_scores =
444               g_malloc0 ((frame_width / filter->block_width) * sizeof (guint));
445         }
446       }
447       break;
448     case PROP_BLOCK_HEIGHT:
449       filter->block_height = g_value_get_uint64 (value);
450       break;
451     case PROP_BLOCK_THRESH:
452       filter->block_thresh = g_value_get_uint64 (value);
453       break;
454     case PROP_IGNORED_LINES:
455       filter->ignored_lines = g_value_get_uint64 (value);
456       break;
457     default:
458       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
459       break;
460   }
461 }
462 
463 static void
464 gst_field_analysis_get_property (GObject * object, guint prop_id,
465     GValue * value, GParamSpec * pspec)
466 {
467   GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);
468 
469   switch (prop_id) {
470     case PROP_FIELD_METRIC:
471     {
472       GstFieldAnalysisFieldMetric metric = DEFAULT_FIELD_METRIC;
473       if (filter->same_field == &same_parity_sad) {
474         metric = GST_FIELDANALYSIS_SAD;
475       } else if (filter->same_field == &same_parity_ssd) {
476         metric = GST_FIELDANALYSIS_SSD;
477       } else if (filter->same_field == &same_parity_3_tap) {
478         metric = GST_FIELDANALYSIS_3_TAP;
479       }
480       g_value_set_enum (value, metric);
481       break;
482     }
483     case PROP_FRAME_METRIC:
484     {
485       GstFieldAnalysisFrameMetric metric = DEFAULT_FRAME_METRIC;
486       if (filter->same_frame == &opposite_parity_5_tap) {
487         metric = GST_FIELDANALYSIS_5_TAP;
488       } else if (filter->same_frame == &opposite_parity_windowed_comb) {
489         metric = GST_FIELDANALYSIS_WINDOWED_COMB;
490       }
491       g_value_set_enum (value, metric);
492       break;
493     }
494     case PROP_NOISE_FLOOR:
495       g_value_set_uint (value, filter->noise_floor);
496       break;
497     case PROP_FIELD_THRESH:
498       g_value_set_float (value, filter->field_thresh);
499       break;
500     case PROP_FRAME_THRESH:
501       g_value_set_float (value, filter->frame_thresh);
502       break;
503     case PROP_COMB_METHOD:
504     {
505       FieldAnalysisCombMethod method = DEFAULT_COMB_METHOD;
506       if (filter->block_score_for_row == &block_score_for_row_32detect) {
507         method = METHOD_32DETECT;
508       } else if (filter->block_score_for_row == &block_score_for_row_iscombed) {
509         method = METHOD_IS_COMBED;
510       } else if (filter->block_score_for_row == &block_score_for_row_5_tap) {
511         method = METHOD_5_TAP;
512       }
513       g_value_set_enum (value, method);
514       break;
515     }
516     case PROP_SPATIAL_THRESH:
517       g_value_set_int64 (value, filter->spatial_thresh);
518       break;
519     case PROP_BLOCK_WIDTH:
520       g_value_set_uint64 (value, filter->block_width);
521       break;
522     case PROP_BLOCK_HEIGHT:
523       g_value_set_uint64 (value, filter->block_height);
524       break;
525     case PROP_BLOCK_THRESH:
526       g_value_set_uint64 (value, filter->block_thresh);
527       break;
528     case PROP_IGNORED_LINES:
529       g_value_set_uint64 (value, filter->ignored_lines);
530       break;
531     default:
532       G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
533       break;
534   }
535 }
536 
537 static void
538 gst_field_analysis_update_format (GstFieldAnalysis * filter, GstCaps * caps)
539 {
540   gint width;
541   GQueue *outbufs;
542   GstVideoInfo vinfo;
543 
544   if (!gst_video_info_from_caps (&vinfo, caps)) {
545     GST_ERROR_OBJECT (filter, "Invalid caps: %" GST_PTR_FORMAT, caps);
546     return;
547   }
548 
549   /* if format is unchanged in our eyes, don't update the context */
550   if ((GST_VIDEO_INFO_WIDTH (&filter->vinfo) == GST_VIDEO_INFO_WIDTH (&vinfo))
551       && (GST_VIDEO_INFO_HEIGHT (&filter->vinfo) ==
552           GST_VIDEO_INFO_HEIGHT (&vinfo))
553       && (GST_VIDEO_INFO_COMP_OFFSET (&filter->vinfo, 0) ==
554           GST_VIDEO_INFO_COMP_OFFSET (&vinfo, 0))
555       && (GST_VIDEO_INFO_COMP_PSTRIDE (&filter->vinfo, 0) ==
556           GST_VIDEO_INFO_COMP_PSTRIDE (&vinfo, 0))
557       && (GST_VIDEO_INFO_COMP_STRIDE (&filter->vinfo, 0) ==
558           GST_VIDEO_INFO_COMP_STRIDE (&vinfo, 0)))
559     return;
560 
561   /* format changed - process and push buffers before updating context */
562 
563   GST_OBJECT_LOCK (filter);
564   filter->flushing = TRUE;
565   outbufs = gst_field_analysis_flush_frames (filter);
566   GST_OBJECT_UNLOCK (filter);
567 
568   if (outbufs) {
569     while (g_queue_get_length (outbufs))
570       gst_pad_push (filter->srcpad, g_queue_pop_head (outbufs));
571   }
572 
573   GST_OBJECT_LOCK (filter);
574   filter->flushing = FALSE;
575 
576   filter->vinfo = vinfo;
577   width = GST_VIDEO_INFO_WIDTH (&filter->vinfo);
578 
579   /* update allocations for metric scores */
580   if (filter->comb_mask) {
581     filter->comb_mask = g_realloc (filter->comb_mask, width);
582   } else {
583     filter->comb_mask = g_malloc (width);
584   }
585   if (filter->block_scores) {
586     gsize nbytes = (width / filter->block_width) * sizeof (guint);
587     filter->block_scores = g_realloc (filter->block_scores, nbytes);
588     memset (filter->block_scores, 0, nbytes);
589   } else {
590     filter->block_scores =
591         g_malloc0 ((width / filter->block_width) * sizeof (guint));
592   }
593 
594   GST_OBJECT_UNLOCK (filter);
595   return;
596 }
597 
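/* Bit flags describing which pairings of fields from the current and previous
 * buffers match; presumably (the matching logic sits outside this excerpt)
 * they are OR-ed into a bitmask so several candidate patterns can be tracked
 * at once. */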
598 #define FIELD_ANALYSIS_TOP_BOTTOM   (1 << 0)
599 #define FIELD_ANALYSIS_BOTTOM_TOP   (1 << 1)
600 #define FIELD_ANALYSIS_TOP_MATCH    (1 << 2)
601 #define FIELD_ANALYSIS_BOTTOM_MATCH (1 << 3)
602 
603 /* decorate removes a buffer from the internal queue, on which we have a ref,
604  * then makes its metadata writable (could be the same buffer, could be a new
605  * buffer, but either way we have a ref on it), decorates this buffer and
606  * returns it */
607 static GstBuffer *
608 gst_field_analysis_decorate (GstFieldAnalysis * filter, gboolean tff,
609     gboolean onefield, FieldAnalysisConclusion conclusion, gboolean drop)
610 {
611   GstBuffer *buf = NULL;
612   GstCaps *caps;
613   GstVideoInfo srcpadvinfo, vinfo = filter->vinfo;
614 
615   /* deal with incoming buffer */
616   if (conclusion > FIELD_ANALYSIS_PROGRESSIVE || filter->is_telecine == TRUE) {
617     filter->is_telecine = conclusion != FIELD_ANALYSIS_INTERLACED;
618     if (conclusion >= FIELD_ANALYSIS_TELECINE_PROGRESSIVE
619         || filter->is_telecine == TRUE) {
620       GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) = GST_VIDEO_INTERLACE_MODE_MIXED;
621     } else {
622       GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) =
623           GST_VIDEO_INTERLACE_MODE_INTERLEAVED;
624     }
625   } else {
626     GST_VIDEO_INFO_INTERLACE_MODE (&vinfo) =
627         GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
628   }
629 
630   caps = gst_pad_get_current_caps (filter->srcpad);
631   gst_video_info_from_caps (&srcpadvinfo, caps);
632   gst_caps_unref (caps);
633   /* push a caps event on the src pad before pushing the buffer */
634   if (!gst_video_info_is_equal (&vinfo, &srcpadvinfo)) {
635     gboolean ret = TRUE;
636 
637     caps = gst_video_info_to_caps (&vinfo);
638     GST_OBJECT_UNLOCK (filter);
639     ret = gst_pad_set_caps (filter->srcpad, caps);
640     GST_OBJECT_LOCK (filter);
641     gst_caps_unref (caps);
642 
643     if (!ret) {
644       GST_ERROR_OBJECT (filter, "Could not set pad caps");
645       return NULL;
646     }
647   }
648 
649   buf = filter->frames[filter->nframes - 1].frame.buffer;
650   gst_video_frame_unmap (&filter->frames[filter->nframes - 1].frame);
651   filter->nframes--;
652 
653   /* set buffer flags */
654   if (!tff) {
655     GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
656   } else if (tff == 1 || (tff == -1
657           && GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF))) {
658     GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF);
659   }
660 
661   if (onefield) {
662     GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
663   } else {
664     GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
665   }
666 
667   if (drop) {
668     GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_RFF);
669   } else {
670     GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_RFF);
671   }
672 
673   if (conclusion == FIELD_ANALYSIS_PROGRESSIVE
674       || conclusion == FIELD_ANALYSIS_TELECINE_PROGRESSIVE) {
675     GST_BUFFER_FLAG_UNSET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
676   } else {
677     GST_BUFFER_FLAG_SET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED);
678   }
679 
680   GST_DEBUG_OBJECT (filter,
681       "Pushing buffer with flags: %p, i %d, tff %d, 1f %d, drop %d; conc %d",
682       buf, GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_INTERLACED),
683       GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_TFF),
684       GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_ONEFIELD),
685       GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_FLAG_RFF), conclusion);
686 
687   return buf;
688 }
689 
690 /* _flush_one does not touch the buffer ref counts directly but _decorate ()
691  * has some influence on ref counts - see its annotation for details */
692 static GstBuffer *
693 gst_field_analysis_flush_one (GstFieldAnalysis * filter, GQueue * outbufs)
694 {
695   GstBuffer *buf = NULL;
696   FieldAnalysis results;
697 
698   if (!filter->nframes)
699     return NULL;
700 
701   GST_DEBUG_OBJECT (filter, "Flushing last frame (nframes %d)",
702       filter->nframes);
703   results = filter->frames[filter->nframes - 1].results;
704   if (results.holding == 1 + TOP_FIELD || results.holding == 1 + BOTTOM_FIELD) {
705     /* should be only one field needed */
706     buf =
707         gst_field_analysis_decorate (filter, results.holding == 1 + TOP_FIELD,
708         TRUE, results.conclusion, FALSE);
709   } else {
710     /* possibility that both fields are needed */
711     buf =
712         gst_field_analysis_decorate (filter, -1, FALSE, results.conclusion,
713         !results.holding);
714   }
715   if (buf) {
716     filter->nframes--;
717     if (outbufs)
718       g_queue_push_tail (outbufs, buf);
719   } else {
720     GST_DEBUG_OBJECT (filter, "Error occurred during decoration");
721   }
722   return buf;
723 }
724 
725 /* _flush_frames () has no direct influence on refcounts and nor does _flush_one,
726  * but _decorate () does and so this function does indirectly */
727 static GQueue *
728 gst_field_analysis_flush_frames (GstFieldAnalysis * filter)
729 {
730   GQueue *outbufs;
731 
732   if (filter->nframes < 2)
733     return NULL;
734 
735   outbufs = g_queue_new ();
736 
737   while (filter->nframes)
738     gst_field_analysis_flush_one (filter, outbufs);
739 
740   return outbufs;
741 }
742 
743 static gboolean
744 gst_field_analysis_sink_event (GstPad * pad, GstObject * parent,
745     GstEvent * event)
746 {
747   GstFieldAnalysis *filter = GST_FIELDANALYSIS (parent);
748   gboolean forward;             /* should we forward the event? */
749   gboolean ret = TRUE;
750 
751   GST_LOG_OBJECT (pad, "received %s event: %" GST_PTR_FORMAT,
752       GST_EVENT_TYPE_NAME (event), event);
753 
754   switch (GST_EVENT_TYPE (event)) {
755     case GST_EVENT_SEGMENT:
756     case GST_EVENT_EOS:
757     {
758       /* for both SEGMENT and EOS it is safest to process and push queued
759        * buffers */
760       GQueue *outbufs;
761 
762       forward = TRUE;
763 
764       GST_OBJECT_LOCK (filter);
765       filter->flushing = TRUE;
766       outbufs = gst_field_analysis_flush_frames (filter);
767       GST_OBJECT_UNLOCK (filter);
768 
769       if (outbufs) {
770         while (g_queue_get_length (outbufs))
771           gst_pad_push (filter->srcpad, g_queue_pop_head (outbufs));
772       }
773 
774       GST_OBJECT_LOCK (filter);
775       filter->flushing = FALSE;
776       GST_OBJECT_UNLOCK (filter);
777       break;
778     }
779     case GST_EVENT_FLUSH_STOP:
780       /* if we have any buffers left in the queue, unref them until the queue
781        * is empty */
782 
783       forward = TRUE;
784 
785       GST_OBJECT_LOCK (filter);
786       gst_field_analysis_reset (filter);
787       GST_OBJECT_UNLOCK (filter);
788       break;
789     case GST_EVENT_CAPS:
790     {
791       GstCaps *caps;
792 
793       forward = FALSE;
794 
795       gst_event_parse_caps (event, &caps);
796       gst_field_analysis_update_format (filter, caps);
797       ret = gst_pad_set_caps (filter->srcpad, caps);
798       gst_event_unref (event);
799       break;
800     }
801     default:
802       forward = TRUE;
803       break;
804   }
805 
806   if (forward) {
807     ret = gst_pad_event_default (pad, parent, event);
808   }
809 
810   return ret;
811 }
812 
813 
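/* The same_parity_* metrics below compare one field of the current frame with
 * the same-parity field of the previous frame; a low score suggests a repeated
 * field, which is the telltale used to lock onto a telecine pattern (see the
 * field-metric property). Each returns the accumulated difference divided by
 * width * height / 2, i.e. a per-pixel average over the half-height field. */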
814 static gfloat
815 same_parity_sad (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
816 {
817   gint j;
818   gfloat sum;
819   guint8 *f1j, *f2j;
820 
821   const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
822   const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
823   const gint stride0x2 =
824       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
825   const gint stride1x2 =
826       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
827   const guint32 noise_floor = filter->noise_floor;
828 
829   f1j =
830       GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
831       0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
832       0) +
833       (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
834       0);
835   f2j =
836       GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
837       0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
838       0) +
839       (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
840       0);
841 
842   sum = 0.0f;
843   for (j = 0; j < (height >> 1); j++) {
844     guint32 tempsum = 0;
845     fieldanalysis_orc_same_parity_sad_planar_yuv (&tempsum, f1j, f2j,
846         noise_floor, width);
847     sum += tempsum;
848     f1j += stride0x2;
849     f2j += stride1x2;
850   }
851 
852   return sum / (0.5f * width * height);
853 }
854 
855 static gfloat
856 same_parity_ssd (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
857 {
858   gint j;
859   gfloat sum;
860   guint8 *f1j, *f2j;
861 
862   const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
863   const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
864   const gint stride0x2 =
865       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
866   const gint stride1x2 =
867       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
868   /* noise floor needs to be squared for SSD */
869   const guint32 noise_floor = filter->noise_floor * filter->noise_floor;
870 
871   f1j =
872       GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
873       0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
874       0) +
875       (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
876       0);
877   f2j =
878       GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
879       0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
880       0) +
881       (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
882       0);
883 
884   sum = 0.0f;
885   for (j = 0; j < (height >> 1); j++) {
886     guint32 tempsum = 0;
887     fieldanalysis_orc_same_parity_ssd_planar_yuv (&tempsum, f1j, f2j,
888         noise_floor, width);
889     sum += tempsum;
890     f1j += stride0x2;
891     f2j += stride1x2;
892   }
893 
894   return sum / (0.5f * width * height); /* field is half height */
895 }
896 
897 /* horizontal [1,4,1] diff between fields - is this a good idea or should the
898  * current sample be emphasised more or less? */
899 static gfloat
900 same_parity_3_tap (GstFieldAnalysis * filter, FieldAnalysisFields (*history)[2])
901 {
902   gint i, j;
903   gfloat sum;
904   guint8 *f1j, *f2j;
905 
906   const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
907   const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
908   const gint stride0x2 =
909       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
910   const gint stride1x2 =
911       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
912   const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
913   /* noise floor needs to be *6 for [1,4,1] */
914   const guint32 noise_floor = filter->noise_floor * 6;
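  /* for a uniform per-sample difference d between the two fields, the [1,4,1]
   * taps scale it to (1 + 4 + 1) * d = 6 * d, hence the factor of 6 here and
   * in the normalisation at the end of this function */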
915 
916   f1j = GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame, 0) +
917       GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0) +
918       (*history)[0].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame,
919       0);
920   f2j =
921       GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
922       0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
923       0) +
924       (*history)[1].parity * GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame,
925       0);
926 
927   sum = 0.0f;
928   for (j = 0; j < (height >> 1); j++) {
929     guint32 tempsum = 0;
930     guint32 diff;
931 
932     /* unroll first as it is a special case */
933     diff = abs (((f1j[0] << 2) + (f1j[incr] << 1))
934         - ((f2j[0] << 2) + (f2j[incr] << 1)));
935     if (diff > noise_floor)
936       sum += diff;
937 
938     fieldanalysis_orc_same_parity_3_tap_planar_yuv (&tempsum, f1j, &f1j[incr],
939         &f1j[incr << 1], f2j, &f2j[incr], &f2j[incr << 1], noise_floor,
940         width - 1);
941     sum += tempsum;
942 
943     /* unroll last as it is a special case */
944     i = width - 1;
945     diff = abs (((f1j[i - incr] << 1) + (f1j[i] << 2))
946         - ((f2j[i - incr] << 1) + (f2j[i] << 2)));
947     if (diff > noise_floor)
948       sum += diff;
949 
950     f1j += stride0x2;
951     f2j += stride1x2;
952   }
953 
954   return sum / ((6.0f / 2.0f) * width * height);        /* 1 + 4 + 1 = 6; field is half height */
955 }
956 
957 /* vertical [1,-3,4,-3,1] - same as is used in FieldDiff from TIVTC,
958  * tritical's AVISynth IVTC filter */
959 /* 0th field's parity defines operation */
960 static gfloat
961 opposite_parity_5_tap (GstFieldAnalysis * filter,
962     FieldAnalysisFields (*history)[2])
963 {
964   gint j;
965   gfloat sum;
966   guint8 *fjm2, *fjm1, *fj, *fjp1, *fjp2;
967   guint32 tempsum;
968 
969   const gint width = GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame);
970   const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
971   const gint stride0x2 =
972       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
973   const gint stride1x2 =
974       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0) << 1;
975   /* noise floor needs to be *6 for [1,-3,4,-3,1] */
976   const guint32 noise_floor = filter->noise_floor * 6;
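  /* for an ideally combed region where same-parity lines have value a and
   * opposite-parity lines have value b, the [1,-3,4,-3,1] taps give
   * a - 3b + 4a - 3b + a = 6 * (a - b), hence the factor of 6 here and in the
   * normalisation at the end of this function */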
977 
978   sum = 0.0f;
979 
980   /* fj is line j of the combined frame made from the top field even lines of
981    *   field 0 and the bottom field odd lines from field 1
982    * fjp1 is one line down from fj
983    * fjm2 is two lines up from fj
984    * fj with j == 0 is the 0th line of the top field
985    * fj with j == 1 is the 0th line of the bottom field or the 1st field of
986    *   the frame*/
987 
988   /* unroll first line as it is a special case */
989   if ((*history)[0].parity == TOP_FIELD) {
990     fj = GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
991         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0);
992     fjp1 =
993         GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
994         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
995         0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0);
996     fjp2 = fj + stride0x2;
997   } else {
998     fj = GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
999         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame, 0);
1000     fjp1 =
1001         GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
1002         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
1003         0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
1004     fjp2 = fj + stride1x2;
1005   }
1006 
1007   tempsum = 0;
1008   fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjp2, fjp1, fj,
1009       fjp1, fjp2, noise_floor, width);
1010   sum += tempsum;
1011 
1012   for (j = 1; j < (height >> 1) - 1; j++) {
1013     /* shift everything down a line in the field of interest (means += stridex2) */
1014     fjm2 = fj;
1015     fjm1 = fjp1;
1016     fj = fjp2;
1017     if ((*history)[0].parity == TOP_FIELD) {
1018       fjp1 += stride1x2;
1019       fjp2 += stride0x2;
1020     } else {
1021       fjp1 += stride0x2;
1022       fjp2 += stride1x2;
1023     }
1024 
1025     tempsum = 0;
1026     fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjm2, fjm1,
1027         fj, fjp1, fjp2, noise_floor, width);
1028     sum += tempsum;
1029   }
1030 
1031   /* unroll the last line as it is a special case */
1032   /* shift everything down a line in the field of interest (means += stridex2) */
1033   fjm2 = fj;
1034   fjm1 = fjp1;
1035   fj = fjp2;
1036 
1037   tempsum = 0;
1038   fieldanalysis_orc_opposite_parity_5_tap_planar_yuv (&tempsum, fjm2, fjm1, fj,
1039       fjm1, fjm2, noise_floor, width);
1040   sum += tempsum;
1041 
1042   return sum / ((6.0f / 2.0f) * width * height);        /* 1 + 4 + 1 == 3 + 3 == 6; field is half height */
1043 }
1044 
1045 /* this metric was sourced from HandBrake but originally from transcode
1046  * the return value is the highest block score for the row of blocks */
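/* comb_mask holds a per-sample combing flag for the current line; a block's
 * score is incremented whenever three horizontally adjacent samples are all
 * flagged (two at the left and right edges), so each entry of block_scores
 * roughly counts combed runs within one block_width-wide column of blocks. */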
1047 static inline guint64
1048 block_score_for_row_32detect (GstFieldAnalysis * filter,
1049     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1050 {
1051   guint64 i, j;
1052   guint8 *comb_mask = filter->comb_mask;
1053   guint *block_scores = filter->block_scores;
1054   guint64 block_score;
1055   guint8 *fjm2, *fjm1, *fj, *fjp1;
1056   const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1057   const gint stridex2 =
1058       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1059   const guint64 block_width = filter->block_width;
1060   const guint64 block_height = filter->block_height;
1061   const gint64 spatial_thresh = filter->spatial_thresh;
1062   const gint width =
1063       GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1064       (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1065 
1066   fjm2 = base_fj - stridex2;
1067   fjm1 = base_fjp1 - stridex2;
1068   fj = base_fj;
1069   fjp1 = base_fjp1;
1070 
1071   for (j = 0; j < block_height; j++) {
1072     /* we have to work one result ahead of ourselves which results in some small
1073      * peculiarities below */
1074     gint diff1, diff2;
1075 
1076     diff1 = fj[0] - fjm1[0];
1077     diff2 = fj[0] - fjp1[0];
1078     /* change in the same direction */
1079     if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1080         || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1081       comb_mask[0] = abs (fj[0] - fjm2[0]) < 10 && abs (fj[0] - fjm1[0]) > 15;
1082     } else {
1083       comb_mask[0] = FALSE;
1084     }
1085 
1086     for (i = 1; i < width; i++) {
1087       const guint64 idx = i * incr;
1088       const guint64 res_idx = (i - 1) / block_width;
1089 
1090       diff1 = fj[idx] - fjm1[idx];
1091       diff2 = fj[idx] - fjp1[idx];
1092       if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1093           || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1094         comb_mask[i] = abs (fj[idx] - fjm2[idx]) < 10
1095             && abs (fj[idx] - fjm1[idx]) > 15;
1096       } else {
1097         comb_mask[i] = FALSE;
1098       }
1099 
1100       if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1101         /* left edge */
1102         block_scores[res_idx]++;
1103       } else if (i == width - 1) {
1104         /* right edge */
1105         if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1106           block_scores[res_idx]++;
1107         if (comb_mask[i - 1] && comb_mask[i])
1108           block_scores[i / block_width]++;
1109       } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1110         block_scores[res_idx]++;
1111       }
1112     }
1113     /* advance down a line */
1114     fjm2 = fjm1;
1115     fjm1 = fj;
1116     fj = fjp1;
1117     fjp1 = fjm1 + stridex2;
1118   }
1119 
1120   block_score = 0;
1121   for (i = 0; i < width / block_width; i++) {
1122     if (block_scores[i] > block_score)
1123       block_score = block_scores[i];
1124   }
1125 
1126   /* block_scores and comb_mask are owned by the filter; clear scores for next row */
1127   memset (block_scores, 0, (width / block_width) * sizeof (guint));
1128   return block_score;
1129 }
1130 
1131 /* this metric was sourced from HandBrake but originally from
1132  * tritical's isCombedT Avisynth function
1133  * the return value is the highest block score for the row of blocks */
1134 static inline guint64
1135 block_score_for_row_iscombed (GstFieldAnalysis * filter,
1136     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1137 {
1138   guint64 i, j;
1139   guint8 *comb_mask = filter->comb_mask;
1140   guint *block_scores = filter->block_scores;
1141   guint64 block_score;
1142   guint8 *fjm1, *fj, *fjp1;
1143   const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1144   const gint stridex2 =
1145       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1146   const guint64 block_width = filter->block_width;
1147   const guint64 block_height = filter->block_height;
1148   const gint64 spatial_thresh = filter->spatial_thresh;
1149   const gint64 spatial_thresh_squared = spatial_thresh * spatial_thresh;
1150   const gint width =
1151       GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1152       (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1153 
1154   fjm1 = base_fjp1 - stridex2;
1155   fj = base_fj;
1156   fjp1 = base_fjp1;
1157 
1158   for (j = 0; j < block_height; j++) {
1159     /* we have to work one result ahead of ourselves which results in some small
1160      * peculiarities below */
1161     gint diff1, diff2;
1162 
1163     diff1 = fj[0] - fjm1[0];
1164     diff2 = fj[0] - fjp1[0];
1165     /* change in the same direction */
1166     if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1167         || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1168       comb_mask[0] =
1169           (fjm1[0] - fj[0]) * (fjp1[0] - fj[0]) > spatial_thresh_squared;
1170     } else {
1171       comb_mask[0] = FALSE;
1172     }
1173 
1174     for (i = 1; i < width; i++) {
1175       const guint64 idx = i * incr;
1176       const guint64 res_idx = (i - 1) / block_width;
1177 
1178       diff1 = fj[idx] - fjm1[idx];
1179       diff2 = fj[idx] - fjp1[idx];
1180       if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1181           || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1182         comb_mask[i] =
1183             (fjm1[idx] - fj[idx]) * (fjp1[idx] - fj[idx]) >
1184             spatial_thresh_squared;
1185       } else {
1186         comb_mask[i] = FALSE;
1187       }
1188 
1189       if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1190         /* left edge */
1191         block_scores[res_idx]++;
1192       } else if (i == width - 1) {
1193         /* right edge */
1194         if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1195           block_scores[res_idx]++;
1196         if (comb_mask[i - 1] && comb_mask[i])
1197           block_scores[i / block_width]++;
1198       } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1199         block_scores[res_idx]++;
1200       }
1201     }
1202     /* advance down a line */
1203     fjm1 = fj;
1204     fj = fjp1;
1205     fjp1 = fjm1 + stridex2;
1206   }
1207 
1208   block_score = 0;
1209   for (i = 0; i < width / block_width; i++) {
1210     if (block_scores[i] > block_score)
1211       block_score = block_scores[i];
1212   }
1213 
1214   /* block_scores and comb_mask are owned by the filter; clear scores for next row */
1215   memset (block_scores, 0, (width / block_width) * sizeof (guint));
1216   return block_score;
1217 }
1218 
1219 /* this metric was sourced from HandBrake but originally from
1220  * tritical's isCombedT Avisynth function
1221  * the return value is the highest block score for the row of blocks */
1222 static inline guint64
1223 block_score_for_row_5_tap (GstFieldAnalysis * filter,
1224     FieldAnalysisFields (*history)[2], guint8 * base_fj, guint8 * base_fjp1)
1225 {
1226   guint64 i, j;
1227   guint8 *comb_mask = filter->comb_mask;
1228   guint *block_scores = filter->block_scores;
1229   guint64 block_score;
1230   guint8 *fjm2, *fjm1, *fj, *fjp1, *fjp2;
1231   const gint incr = GST_VIDEO_FRAME_COMP_PSTRIDE (&(*history)[0].frame, 0);
1232   const gint stridex2 =
1233       GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0) << 1;
1234   const guint64 block_width = filter->block_width;
1235   const guint64 block_height = filter->block_height;
1236   const gint64 spatial_thresh = filter->spatial_thresh;
1237   const gint64 spatial_threshx6 = 6 * spatial_thresh;
1238   const gint width =
1239       GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) -
1240       (GST_VIDEO_FRAME_WIDTH (&(*history)[0].frame) % block_width);
1241 
1242 
1243   fjm2 = base_fj - stridex2;
1244   fjm1 = base_fjp1 - stridex2;
1245   fj = base_fj;
1246   fjp1 = base_fjp1;
1247   fjp2 = fj + stridex2;
1248 
1249   for (j = 0; j < block_height; j++) {
1250     /* we have to work one result ahead of ourselves which results in some small
1251      * peculiarities below */
1252     gint diff1, diff2;
1253 
1254     diff1 = fj[0] - fjm1[0];
1255     diff2 = fj[0] - fjp1[0];
1256     /* change in the same direction */
1257     if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1258         || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1259       comb_mask[0] =
1260           abs (fjm2[0] + (fj[0] << 2) + fjp2[0] - 3 * (fjm1[0] + fjp1[0])) >
1261           spatial_threshx6;
1262 
1263       /* motion detection that needs previous and next frames
1264          this isn't really necessary, but acts as an optimisation if the
1265          additional delay isn't a problem
1266          if (motion_detection) {
1267          if (abs(fpj[idx] - fj[idx]               ) > motion_thresh &&
1268          abs(           fjm1[idx] - fnjm1[idx]) > motion_thresh &&
1269          abs(           fjp1[idx] - fnjp1[idx]) > motion_thresh)
1270          motion++;
1271          if (abs(             fj[idx]   - fnj[idx]) > motion_thresh &&
1272          abs(fpjm1[idx] - fjm1[idx]           ) > motion_thresh &&
1273          abs(fpjp1[idx] - fjp1[idx]           ) > motion_thresh)
1274          motion++;
1275          } else {
1276          motion = 1;
1277          }
1278        */
1279     } else {
1280       comb_mask[0] = FALSE;
1281     }
1282 
1283     for (i = 1; i < width; i++) {
1284       const guint64 idx = i * incr;
1285       const guint64 res_idx = (i - 1) / block_width;
1286 
1287       diff1 = fj[idx] - fjm1[idx];
1288       diff2 = fj[idx] - fjp1[idx];
1289       if ((diff1 > spatial_thresh && diff2 > spatial_thresh)
1290           || (diff1 < -spatial_thresh && diff2 < -spatial_thresh)) {
1291         comb_mask[i] =
1292             abs (fjm2[idx] + (fj[idx] << 2) + fjp2[idx] - 3 * (fjm1[idx] +
1293                 fjp1[idx])) > spatial_threshx6;
1294       } else {
1295         comb_mask[i] = FALSE;
1296       }
1297 
1298       if (i == 1 && comb_mask[i - 1] && comb_mask[i]) {
1299         /* left edge */
1300         block_scores[res_idx]++;
1301       } else if (i == width - 1) {
1302         /* right edge */
1303         if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i])
1304           block_scores[res_idx]++;
1305         if (comb_mask[i - 1] && comb_mask[i])
1306           block_scores[i / block_width]++;
1307       } else if (comb_mask[i - 2] && comb_mask[i - 1] && comb_mask[i]) {
1308         block_scores[res_idx]++;
1309       }
1310     }
1311     /* advance down a line */
1312     fjm2 = fjm1;
1313     fjm1 = fj;
1314     fj = fjp1;
1315     fjp1 = fjp2;
1316     fjp2 = fj + stridex2;
1317   }
1318 
1319   block_score = 0;
1320   for (i = 0; i < width / block_width; i++) {
1321     if (block_scores[i] > block_score)
1322       block_score = block_scores[i];
1323   }
1324 
1325   /* block_scores and comb_mask are owned by the filter; clear scores for next row */
1326   memset (block_scores, 0, (width / block_width) * sizeof (guint));
1327   return block_score;
1328 }
1329 
1330 /* a pass is made over the field using one of three comb-detection metrics
1331    and the results are then analysed block-wise. if the samples to the left
1332    and right are combed, they contribute to the block score. if the block
1333    score is above the given threshold, the frame is combed. if the block
1334    score is between half the threshold and the threshold, the block is
1335    slightly combed. if when analysis is complete, slight combing is detected
1336    that is returned. if any results are observed that are above the threshold,
1337    the function returns immediately */
1338 /* 0th field's parity defines operation */
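/* return values: 0.0 means no significant combing was found, 1.0 means the
 * frame should be blended (slight combing, or heavy combing on content whose
 * caps already declare interlace-mode=interleaved), 2.0 means heavy combing
 * that calls for full deinterlacing */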
1339 static gfloat
1340 opposite_parity_windowed_comb (GstFieldAnalysis * filter,
1341     FieldAnalysisFields (*history)[2])
1342 {
1343   gint j;
1344   gboolean slightly_combed;
1345 
1346   const gint height = GST_VIDEO_FRAME_HEIGHT (&(*history)[0].frame);
1347   const gint stride = GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
1348   const guint64 block_thresh = filter->block_thresh;
1349   const guint64 block_height = filter->block_height;
1350   guint8 *base_fj, *base_fjp1;
1351 
1352   if ((*history)[0].parity == TOP_FIELD) {
1353     base_fj =
1354         GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
1355         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame, 0);
1356     base_fjp1 =
1357         GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
1358         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame,
1359         0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[1].frame, 0);
1360   } else {
1361     base_fj =
1362         GST_VIDEO_FRAME_COMP_DATA (&(*history)[1].frame,
1363         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[1].frame, 0);
1364     base_fjp1 =
1365         GST_VIDEO_FRAME_COMP_DATA (&(*history)[0].frame,
1366         0) + GST_VIDEO_FRAME_COMP_OFFSET (&(*history)[0].frame,
1367         0) + GST_VIDEO_FRAME_COMP_STRIDE (&(*history)[0].frame, 0);
1368   }
1369 
1370   /* we operate on a row of blocks of height block_height through each iteration */
1371   slightly_combed = FALSE;
1372   for (j = 0; j <= height - filter->ignored_lines - block_height;
1373       j += block_height) {
1374     guint64 line_offset = (filter->ignored_lines + j) * stride;
1375     guint block_score =
1376         filter->block_score_for_row (filter, history, base_fj + line_offset,
1377         base_fjp1 + line_offset);
1378 
1379     if (block_score > (block_thresh >> 1)
1380         && block_score <= block_thresh) {
1381       /* blend if nothing more combed comes along */
1382       slightly_combed = TRUE;
1383     } else if (block_score > block_thresh) {
1384       if (GST_VIDEO_INFO_INTERLACE_MODE (&(*history)[0].frame.info) ==
1385           GST_VIDEO_INTERLACE_MODE_INTERLEAVED) {
1386         return 1.0f;            /* blend */
1387       } else {
1388         return 2.0f;            /* deinterlace */
1389       }
1390     }
1391   }
1392 
1393   return (gfloat) slightly_combed;      /* TRUE means blend, else don't */
1394 }
1395 
/* this is where the magic happens
 *
 * the buffer incoming to the chain function (buf_to_queue) is added to the
 * internal queue and then should no longer be used until it is popped from the
 * queue.
 *
 * analysis is performed on the incoming buffer (peeked from the queue) and the
 * previous buffer using two classes of metrics making up five individual
 * scores.
 *
 * there are two same-parity comparisons: top of current with top of previous
 * and bottom of current with bottom of previous
 *
 * there are three opposing parity comparisons: top of current with bottom of
 * _current_, top of current with bottom of previous and bottom of current with
 * top of previous.
 *
 * from the results of these comparisons we can use some rather complex logic to
 * identify the state of the previous buffer, decorate and return it and
 * identify some preliminary state of the current buffer.
 *
 * the returned buffer has a ref on it (it has come from _make_metadata_writable
 * that was called on an incoming buffer that was queued and then popped) */
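/* quick reference for the scoring and bookkeeping below (this restates what
 * the code already does, it does not add behaviour):
 *   res0->f   - top vs bottom field of the current frame (same_frame)
 *   res0->t   - top of current vs top of previous (same_field)
 *   res0->b   - bottom of current vs bottom of previous (same_field)
 *   res0->t_b - top of current vs bottom of previous (same_frame)
 *   res0->b_t - bottom of current vs top of previous (same_frame)
 * res->holding records which fields of a frame are still needed:
 *   -1 = unknown, 0 = none, 1 + TOP_FIELD or 1 + BOTTOM_FIELD = that single
 *   field, 1 + BOTH_FIELDS = the whole frame */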
static GstBuffer *
gst_field_analysis_process_buffer (GstFieldAnalysis * filter,
    GstBuffer ** buf_to_queue)
{
  /* res0/1 correspond to f0/1 */
  FieldAnalysis *res0, *res1;
  FieldAnalysisFields history[2];
  GstBuffer *outbuf = NULL;

  /* move previous result to index 1 */
  filter->frames[1] = filter->frames[0];

  if (!gst_video_frame_map (&filter->frames[0].frame, &filter->vinfo,
          *buf_to_queue, GST_MAP_READ)) {
    GST_ERROR_OBJECT (filter, "Failed to map buffer: %" GST_PTR_FORMAT,
        *buf_to_queue);
    return NULL;
  }
  filter->nframes++;
  /* note that we have a ref and mapping the buffer takes a ref so to destroy a
   * buffer we need to unmap it and unref it */

  res0 = &filter->frames[0].results;    /* results for current frame */
  res1 = &filter->frames[1].results;    /* results for previous frame */

  history[0].frame = filter->frames[0].frame;
  /* we do it like this because the first frame has no predecessor so this is
   * the only result we can get for it */
  if (filter->nframes >= 1) {
    history[1].frame = filter->frames[0].frame;
    history[0].parity = TOP_FIELD;
    history[1].parity = BOTTOM_FIELD;
    /* compare the fields within the buffer, if the buffer exhibits combing it
     * could be interlaced or a mixed telecine frame */
    res0->f = filter->same_frame (filter, &history);
    res0->t = res0->b = res0->t_b = res0->b_t = G_MAXFLOAT;
    if (filter->nframes == 1)
      GST_DEBUG_OBJECT (filter, "Scores: f %f, t , b , t_b , b_t ", res0->f);
    if (res0->f <= filter->frame_thresh) {
      res0->conclusion = FIELD_ANALYSIS_PROGRESSIVE;
    } else {
      res0->conclusion = FIELD_ANALYSIS_INTERLACED;
    }
    res0->holding = -1;         /* needed fields unknown */
    res0->drop = FALSE;
  }
  if (filter->nframes >= 2) {
    guint telecine_matches;
    gboolean first_buffer = filter->first_buffer;

    filter->first_buffer = FALSE;

    history[1].frame = filter->frames[1].frame;

    /* compare the top and bottom fields to the previous frame */
    history[0].parity = TOP_FIELD;
    history[1].parity = TOP_FIELD;
    res0->t = filter->same_field (filter, &history);
    history[0].parity = BOTTOM_FIELD;
    history[1].parity = BOTTOM_FIELD;
    res0->b = filter->same_field (filter, &history);

    /* compare the top field from this frame to the bottom field of the
     * previous frame for combing (and vice versa) */
    history[0].parity = TOP_FIELD;
    history[1].parity = BOTTOM_FIELD;
    res0->t_b = filter->same_frame (filter, &history);
    history[0].parity = BOTTOM_FIELD;
    history[1].parity = TOP_FIELD;
    res0->b_t = filter->same_frame (filter, &history);

    GST_DEBUG_OBJECT (filter,
        "Scores: f %f, t %f, b %f, t_b %f, b_t %f", res0->f,
        res0->t, res0->b, res0->t_b, res0->b_t);

    /* analysis */
    telecine_matches = 0;
    if (res0->t_b <= filter->frame_thresh)
      telecine_matches |= FIELD_ANALYSIS_TOP_BOTTOM;
    if (res0->b_t <= filter->frame_thresh)
      telecine_matches |= FIELD_ANALYSIS_BOTTOM_TOP;
    /* normally if there is a top or bottom field match, it is significantly
     * smaller than the other match - try 10% */
    if (res0->t <= filter->field_thresh || res0->t * (100 / 10) < res0->b)
      telecine_matches |= FIELD_ANALYSIS_TOP_MATCH;
    if (res0->b <= filter->field_thresh || res0->b * (100 / 10) < res0->t)
      telecine_matches |= FIELD_ANALYSIS_BOTTOM_MATCH;

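    /* worked example of the matching heuristics above (numbers invented for
     * illustration, not the element defaults): with a field threshold of 0.05,
     * t = 0.01 and b = 0.30, TOP_MATCH is set twice over - 0.01 <= 0.05 and
     * 0.01 * 10 = 0.1 < 0.30 - while BOTTOM_MATCH is not, since 0.30 > 0.05
     * and 0.30 * 10 = 3.0 is not < 0.01; i.e. a repeated top field alongside
     * a changing bottom field is still detected in noisy material */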
    if (telecine_matches & (FIELD_ANALYSIS_TOP_MATCH |
            FIELD_ANALYSIS_BOTTOM_MATCH)) {
      /* we have a repeated field => some kind of telecine */
      if (res1->f <= filter->frame_thresh) {
        /* prev P */
        if ((telecine_matches & FIELD_ANALYSIS_TOP_MATCH)
            && (telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH)) {
          /* prev P, cur repeated => cur P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_PROGRESSIVE;
          res0->holding = 1 + BOTH_FIELDS;
          /* push prev P, RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else {
          /* prev P, cur t xor b matches => cur TCM */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* hold non-repeated: if bottom match, hold top = 1 + 0 */
          res0->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH);
          /* push prev P */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      } else {
        /* prev !P */
        gboolean b, t;

        if (res0->f <= filter->frame_thresh) {
          /* cur P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_PROGRESSIVE;
          res0->holding = 1 + BOTH_FIELDS;
        } else {
          /* cur !P */
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          if (telecine_matches & FIELD_ANALYSIS_TOP_MATCH
              && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) {
            /* cur t && b */
            res0->holding = 0;
          } else {
            /* cur t xor b; hold non-repeated */
            res0->holding =
                1 + !(telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH);
          }
        }

        if (res1->holding == -1) {
          b = t = TRUE;
        } else {
          b = res1->holding == 1 + BOTTOM_FIELD;
          t = res1->holding == 1 + TOP_FIELD;
        }

        if ((t && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) || (b
                && telecine_matches & FIELD_ANALYSIS_TOP_MATCH)) {
          if (t && telecine_matches & FIELD_ANALYSIS_BOTTOM_MATCH) {
            res1->holding = 1 + TOP_FIELD;
          } else if (b && telecine_matches & FIELD_ANALYSIS_TOP_MATCH) {
            res1->holding = 1 + BOTTOM_FIELD;
          }
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (res0->f > filter->frame_thresh && ((t
                    && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) || (b
                    && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM))) {
          if (t && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) {
            res1->holding = 1 + TOP_FIELD;
          } else if (b && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM) {
            res1->holding = 1 + BOTTOM_FIELD;
          }
          res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* hold the opposite field to the one held in the last frame */
          res0->holding = 1 + (res1->holding == 1 + TOP_FIELD);
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (first_buffer && (telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP
                || telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM)) {
          /* non-matched field is an orphan in the first buffer - push orphan as 1F */
          res1->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
          /* if prev b matched, prev t is orphan */
          res1->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          /* push 1F held field */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
          /* holding both fields, push prev as is */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else {
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      }
    } else if (res0->f <= filter->frame_thresh) {
      /* cur P */
      res0->conclusion = FIELD_ANALYSIS_PROGRESSIVE;
      res0->holding = 1 + BOTH_FIELDS;
      if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
        /* holding both fields, push prev as is */
        outbuf =
            gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
            res1->drop);
      } else if (res1->holding > 0) {
        /* holding one field, push prev 1F held */
        outbuf =
            gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
            res1->conclusion, res1->drop);
      } else {
        /* unknown or no fields held, push prev as is with RFF */
        /* this will push unknown as drop - should be pushed as not drop? */
        res1->drop = TRUE;
        outbuf =
            gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
            res1->drop);
      }
    } else {
      /* cur !P */
      if (telecine_matches & (FIELD_ANALYSIS_TOP_BOTTOM |
              FIELD_ANALYSIS_BOTTOM_TOP)) {
        /* cross-parity match => TCM */
        gboolean b, t;

        if (res1->holding == -1) {
          b = t = TRUE;
        } else {
          b = res1->holding == 1 + BOTTOM_FIELD;
          t = res1->holding == 1 + TOP_FIELD;
        }

        res0->conclusion = FIELD_ANALYSIS_TELECINE_MIXED;
        /* leave holding as unknown */
        if (res1->holding == 1 + BOTH_FIELDS) {
          /* prev P/TCP/I [or TCM repeated (weird case)] */
          /* push prev as is */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else if ((t && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM) || (b
                && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP)) {
          /* held is opposite to matched => need both fields from prev */
          /* if t_b, hold bottom from prev and top from current, else vice-versa */
          res1->holding = 1 + ! !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          res0->holding = 1 + !(telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM);
          /* push prev TCM */
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        } else if ((res1->holding > 0 && res1->holding != 1 + BOTH_FIELDS) || (t
                && telecine_matches & FIELD_ANALYSIS_BOTTOM_TOP) || (b
                && telecine_matches & FIELD_ANALYSIS_TOP_BOTTOM)) {
          /* held field is needed, push prev 1F held */
          outbuf =
              gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
              res1->conclusion, res1->drop);
        } else {
          /* holding none or unknown */
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      } else {
        /* cur I */
        res0->conclusion = FIELD_ANALYSIS_INTERLACED;
        res0->holding = 1 + BOTH_FIELDS;
        /* push prev appropriately */
        res1->drop = res1->holding <= 0;
        if (res1->holding != 0) {
          res1->drop = FALSE;
          if (res1->holding == 1 + BOTH_FIELDS || res1->holding == -1) {
            /* push prev as is */
            outbuf =
                gst_field_analysis_decorate (filter, -1, FALSE,
                res1->conclusion, res1->drop);
          } else {
            /* push prev 1F held */
            outbuf =
                gst_field_analysis_decorate (filter, !(res1->holding - 1), TRUE,
                res1->conclusion, res1->drop);
          }
        } else {
          /* push prev as is with RFF */
          res1->drop = TRUE;
          outbuf =
              gst_field_analysis_decorate (filter, -1, FALSE, res1->conclusion,
              res1->drop);
        }
      }
    }
  }

  switch (res0->conclusion) {
    case FIELD_ANALYSIS_PROGRESSIVE:
      GST_DEBUG_OBJECT (filter, "Conclusion: PROGRESSIVE");
      break;
    case FIELD_ANALYSIS_INTERLACED:
      GST_DEBUG_OBJECT (filter, "Conclusion: INTERLACED");
      break;
    case FIELD_ANALYSIS_TELECINE_PROGRESSIVE:
      GST_DEBUG_OBJECT (filter, "Conclusion: TC PROGRESSIVE");
      break;
    case FIELD_ANALYSIS_TELECINE_MIXED:
      GST_DEBUG_OBJECT (filter, "Conclusion: TC MIXED %s",
          res0->holding ==
          1 + BOTH_FIELDS ? "top and bottom" : res0->holding ==
          1 + BOTTOM_FIELD ? "bottom" : "top");
      break;
    default:
      GST_DEBUG_OBJECT (filter, "Invalid conclusion! This is a bug!");
      break;
  }

  return outbuf;
}

/* we have a ref on buf when it comes into chain */
static GstFlowReturn
gst_field_analysis_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstFlowReturn ret = GST_FLOW_OK;
  GstFieldAnalysis *filter;
  GstBuffer *outbuf = NULL;

  filter = GST_FIELDANALYSIS (parent);

  GST_OBJECT_LOCK (filter);
  if (filter->flushing) {
    GST_DEBUG_OBJECT (filter, "We are flushing.");
    /* we have a ref on buf so it must be unreffed */
    goto unref_unlock_ret;
  }

  if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT)) {
    GST_DEBUG_OBJECT (filter, "Discont: flushing");
    /* we should have a ref on outbuf, either because we had one when it entered
     * the queue and _make_metadata_writable () inside _decorate () returned
     * the same buffer or because it returned a new buffer on which we have one
     * ref */
    outbuf = gst_field_analysis_flush_one (filter, NULL);

    if (outbuf) {
      /* we give away our ref on outbuf here */
      GST_OBJECT_UNLOCK (filter);
      ret = gst_pad_push (filter->srcpad, outbuf);
      GST_OBJECT_LOCK (filter);
      if (filter->flushing) {
        GST_DEBUG_OBJECT (filter, "We are flushing. outbuf already pushed.");
        /* we have a ref on buf so it must be unreffed */
        goto unref_unlock_ret;
      }
    }

    gst_field_analysis_clear_frames (filter);

    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (filter,
          "Pushing of flushed buffer failed with return %d", ret);
      /* we have a ref on buf so it must be unreffed */
      goto unref_unlock_ret;
    } else {
      outbuf = NULL;
    }
  }

  /* after this function, buf has been pushed to the internal queue and its ref
   * retained there and we have a ref on outbuf */
  outbuf = gst_field_analysis_process_buffer (filter, &buf);

  GST_OBJECT_UNLOCK (filter);

  /* here we give up our ref on outbuf */
  if (outbuf)
    ret = gst_pad_push (filter->srcpad, outbuf);

  return ret;

unref_unlock_ret:
  /* we must unref the input buffer here */
  gst_buffer_unref (buf);
  GST_OBJECT_UNLOCK (filter);
  return ret;
}

static GstStateChangeReturn
gst_field_analysis_change_state (GstElement * element,
    GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstFieldAnalysis *filter = GST_FIELDANALYSIS (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret != GST_STATE_CHANGE_SUCCESS)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_field_analysis_reset (filter);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
    default:
      break;
  }

  return ret;
}

static void
gst_field_analysis_finalize (GObject * object)
{
  GstFieldAnalysis *filter = GST_FIELDANALYSIS (object);

  gst_field_analysis_reset (filter);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}


static gboolean
fieldanalysis_init (GstPlugin * plugin)
{
  return GST_ELEMENT_REGISTER (fieldanalysis, plugin);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    fieldanalysis,
    "Video field analysis",
    fieldanalysis_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
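
/* once this plugin is built and installed where GStreamer can find it, the
 * element registered above can be examined with the standard tool
 * gst-inspect-1.0, e.g. `gst-inspect-1.0 fieldanalysis` */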