/*
 * GStreamer OpenNI2 device source element
 * Copyright (C) 2013 Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>
 *
 * This library is free software; you can
 * redistribute it and/or modify it under the terms of the GNU Library
 * General Public License as published by the Free Software Foundation;
 * either version 2 of the License, or (at your option) any later version.
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library
 * General Public License for more details. You should have received a copy
 * of the GNU Library General Public License along with this library; if
 * not, write to the Free Software Foundation, Inc., 51 Franklin St,
 * Fifth Floor, Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:element-openni2src
 *
 * ## Examples
 *
 * Some recorded .oni files are available at <http://people.cs.pitt.edu/~chang/1635/proj11/kinectRecord>
 *
 * ``` shell
 * LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5 openni2src location='Downloads/mr.oni' sourcetype=depth ! videoconvert ! ximagesink
 * ```
 *
 * ``` shell
 * LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5 openni2src location='Downloads/mr.oni' sourcetype=color ! videoconvert ! ximagesink
 * ```
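 *
 * The experimental sourcetype=both mode packs the depth reading into the
 * alpha channel of an RGBA output. A pipeline of the same shape should work,
 * assuming the recording carries color and depth streams at the same
 * resolution (untested sketch):
 *
 * ``` shell
 * LD_LIBRARY_PATH=/usr/lib/OpenNI2/Drivers/ gst-launch-1.0 --gst-debug=openni2src:5 openni2src location='Downloads/mr.oni' sourcetype=both ! videoconvert ! ximagesink
 * ```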
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "gstopenni2src.h"

GST_DEBUG_CATEGORY_STATIC (openni2src_debug);
#define GST_CAT_DEFAULT openni2src_debug
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE ("{RGBA, RGB, GRAY16_LE}"))
    );

enum
{
  PROP_0,
  PROP_LOCATION,
  PROP_SOURCETYPE
};
typedef enum
{
  SOURCETYPE_DEPTH,
  SOURCETYPE_COLOR,
  SOURCETYPE_BOTH
} GstOpenni2SourceType;
#define DEFAULT_SOURCETYPE SOURCETYPE_DEPTH

#define SAMPLE_READ_WAIT_TIMEOUT 2000   /* 2000ms */

#define GST_TYPE_OPENNI2_SRC_SOURCETYPE (gst_openni2_src_sourcetype_get_type ())
static GType
gst_openni2_src_sourcetype_get_type (void)
{
  static GType etype = 0;
  if (etype == 0) {
    static const GEnumValue values[] = {
      {SOURCETYPE_DEPTH, "Get depth readings", "depth"},
      {SOURCETYPE_COLOR, "Get color readings", "color"},
      {SOURCETYPE_BOTH,
          "Get color and depth (as alpha) readings - EXPERIMENTAL",
          "both"},
      {0, NULL, NULL},
    };
    etype = g_enum_register_static ("GstOpenni2SrcSourcetype", values);
  }
  return etype;
}

/* GObject methods */
static void gst_openni2_src_dispose (GObject * object);
static void gst_openni2_src_finalize (GObject * gobject);
static void gst_openni2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec);
static void gst_openni2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec);

/* basesrc methods */
static gboolean gst_openni2_src_start (GstBaseSrc * bsrc);
static gboolean gst_openni2_src_stop (GstBaseSrc * bsrc);
static gboolean gst_openni2_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter);
static gboolean gst_openni2src_decide_allocation (GstBaseSrc * bsrc,
    GstQuery * query);

/* element methods */
static GstStateChangeReturn gst_openni2_src_change_state (GstElement * element,
    GstStateChange transition);

/* pushsrc method */
static GstFlowReturn gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf);

/* OpenNI2 interaction methods */
static gboolean openni2_initialise_library ();
static gboolean openni2_initialise_devices (GstOpenni2Src * src);
static GstFlowReturn openni2_read_gstbuffer (GstOpenni2Src * src,
    GstBuffer * buf);

#define parent_class gst_openni2_src_parent_class
G_DEFINE_TYPE (GstOpenni2Src, gst_openni2_src, GST_TYPE_PUSH_SRC);
GST_ELEMENT_REGISTER_DEFINE (openni2src, "openni2src", GST_RANK_NONE,
    GST_TYPE_OPENNI2_SRC);

static void
gst_openni2_src_class_init (GstOpenni2SrcClass * klass)
{
  GObjectClass *gobject_class;
  GstPushSrcClass *pushsrc_class;
  GstBaseSrcClass *basesrc_class;
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gobject_class = (GObjectClass *) klass;
  basesrc_class = (GstBaseSrcClass *) klass;
  pushsrc_class = (GstPushSrcClass *) klass;

  gobject_class->dispose = gst_openni2_src_dispose;
  gobject_class->finalize = gst_openni2_src_finalize;
  gobject_class->set_property = gst_openni2_src_set_property;
  gobject_class->get_property = gst_openni2_src_get_property;
  g_object_class_install_property
      (gobject_class, PROP_LOCATION,
      g_param_spec_string ("location", "Location",
          "Source uri, can be a file or a device.", "", (GParamFlags)
          (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
  g_object_class_install_property (gobject_class, PROP_SOURCETYPE,
      g_param_spec_enum ("sourcetype",
          "Device source type",
          "Type of readings to get from the source",
          GST_TYPE_OPENNI2_SRC_SOURCETYPE, DEFAULT_SOURCETYPE,
          (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));

  gst_type_mark_as_plugin_api (GST_TYPE_OPENNI2_SRC_SOURCETYPE,
      (GstPluginAPIFlags) 0);

  basesrc_class->start = GST_DEBUG_FUNCPTR (gst_openni2_src_start);
  basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_openni2_src_stop);
  basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_openni2_src_get_caps);
  basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_openni2_src_set_caps);
  basesrc_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_openni2src_decide_allocation);

  gst_element_class_add_static_pad_template (element_class, &srctemplate);

  gst_element_class_set_static_metadata (element_class, "Openni2 client source",
      "Source/Video",
      "Extract readings from an OpenNI supported device (Kinect etc).",
      "Miguel Casas-Sanchez <miguelecasassanchez@gmail.com>");

  element_class->change_state = gst_openni2_src_change_state;

  pushsrc_class->fill = GST_DEBUG_FUNCPTR (gst_openni2src_fill);

  GST_DEBUG_CATEGORY_INIT (openni2src_debug, "openni2src", 0,
      "OpenNI2 Device Source");

  /* OpenNI2 initialisation inside this function */
  openni2_initialise_library ();
}

static void
gst_openni2_src_init (GstOpenni2Src * ni2src)
{
  gst_base_src_set_live (GST_BASE_SRC (ni2src), TRUE);
  gst_base_src_set_format (GST_BASE_SRC (ni2src), GST_FORMAT_TIME);

  ni2src->device = new openni::Device ();
  ni2src->depth = new openni::VideoStream ();
  ni2src->color = new openni::VideoStream ();
  ni2src->depthFrame = new openni::VideoFrameRef ();
  ni2src->colorFrame = new openni::VideoFrameRef ();

  ni2src->oni_start_ts = GST_CLOCK_TIME_NONE;
}

static void
gst_openni2_src_dispose (GObject * object)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (object);

  /* Clear the pointer as well; finalize checks it again and would otherwise
   * unref the caps a second time. */
  if (ni2src->gst_caps) {
    gst_caps_unref (ni2src->gst_caps);
    ni2src->gst_caps = NULL;
  }

  G_OBJECT_CLASS (parent_class)->dispose (object);
}

static void
gst_openni2_src_finalize (GObject * gobject)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (gobject);

  if (ni2src->uri_name) {
    g_free (ni2src->uri_name);
    ni2src->uri_name = NULL;
  }

  if (ni2src->gst_caps) {
    gst_caps_unref (ni2src->gst_caps);
    ni2src->gst_caps = NULL;
  }

  if (ni2src->device) {
    delete ni2src->device;
    ni2src->device = NULL;
  }

  if (ni2src->depth) {
    delete ni2src->depth;
    ni2src->depth = NULL;
  }

  if (ni2src->color) {
    delete ni2src->color;
    ni2src->color = NULL;
  }

  if (ni2src->depthFrame) {
    delete ni2src->depthFrame;
    ni2src->depthFrame = NULL;
  }

  if (ni2src->colorFrame) {
    delete ni2src->colorFrame;
    ni2src->colorFrame = NULL;
  }

  G_OBJECT_CLASS (parent_class)->finalize (gobject);
}

static void
gst_openni2_src_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstOpenni2Src *openni2src = GST_OPENNI2_SRC (object);

  GST_OBJECT_LOCK (openni2src);
  switch (prop_id) {
    case PROP_LOCATION:
      if (!g_value_get_string (value)) {
        GST_WARNING ("location property cannot be NULL");
        break;
      }

      if (openni2src->uri_name != NULL) {
        g_free (openni2src->uri_name);
        openni2src->uri_name = NULL;
      }

      openni2src->uri_name = g_value_dup_string (value);
      break;
    case PROP_SOURCETYPE:
      openni2src->sourcetype = g_value_get_enum (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }

  GST_OBJECT_UNLOCK (openni2src);
}

static void
gst_openni2_src_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstOpenni2Src *openni2src = GST_OPENNI2_SRC (object);

  GST_OBJECT_LOCK (openni2src);
  switch (prop_id) {
    case PROP_LOCATION:
      g_value_set_string (value, openni2src->uri_name);
      break;
    case PROP_SOURCETYPE:
      g_value_set_enum (value, openni2src->sourcetype);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
  GST_OBJECT_UNLOCK (openni2src);
}

/* Interesting info from gstv4l2src.c:
 * "start and stop are not symmetric -- start will open the device, but not
 * start capture. it's setcaps that will start capture, which is called via
 * basesrc's negotiate method. stop will both stop capture and close the
 * device."
 */
static gboolean
gst_openni2_src_start (GstBaseSrc * bsrc)
{
  GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);
  openni::Status rc = openni::STATUS_OK;

  if (src->depth->isValid ()) {
    rc = src->depth->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start the depth stream: %s",
          openni::OpenNI::getExtendedError ());
      return FALSE;
    }
  }

  if (src->color->isValid ()) {
    rc = src->color->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start the color stream: %s",
          openni::OpenNI::getExtendedError ());
      return FALSE;
    }
  }

  return TRUE;
}

static gboolean
gst_openni2_src_stop (GstBaseSrc * bsrc)
{
  GstOpenni2Src *src = GST_OPENNI2_SRC (bsrc);

  if (src->depthFrame)
    src->depthFrame->release ();

  if (src->colorFrame)
    src->colorFrame->release ();

  if (src->depth->isValid ()) {
    src->depth->stop ();
    src->depth->destroy ();
  }

  if (src->color->isValid ()) {
    src->color->stop ();
    src->color->destroy ();
  }

  src->device->close ();

  return TRUE;
}

static GstCaps *
gst_openni2_src_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstOpenni2Src *ni2src;
  GstCaps *caps;
  GstVideoInfo info;
  GstVideoFormat format;

  ni2src = GST_OPENNI2_SRC (src);

  GST_OBJECT_LOCK (ni2src);
  if (ni2src->gst_caps)
    goto out;

  // If we are here, we need to compose the caps and return them.
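  /* Pick the output format to match what openni2_read_gstbuffer () will
   * produce: depth+color ("both") is packed as RGBA with depth in the alpha
   * channel, depth alone as 16-bit grayscale, color alone as RGB. */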

  if (ni2src->depth->isValid () && ni2src->color->isValid () &&
      ni2src->sourcetype == SOURCETYPE_BOTH
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGBA;
  } else if (ni2src->depth->isValid () &&
      ni2src->sourcetype == SOURCETYPE_DEPTH) {
    format = GST_VIDEO_FORMAT_GRAY16_LE;
  } else if (ni2src->color->isValid () && ni2src->sourcetype == SOURCETYPE_COLOR
      && ni2src->colorpixfmt == openni::PIXEL_FORMAT_RGB888) {
    format = GST_VIDEO_FORMAT_RGB;
  } else {
    goto out;
  }

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, format, ni2src->width, ni2src->height);
  info.fps_n = ni2src->fps;
  info.fps_d = 1;
  caps = gst_video_info_to_caps (&info);

  GST_INFO_OBJECT (ni2src, "probed caps: %" GST_PTR_FORMAT, caps);
  ni2src->gst_caps = caps;

out:
  GST_OBJECT_UNLOCK (ni2src);

  if (!ni2src->gst_caps)
    return gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD (ni2src));

  return (filter)
      ? gst_caps_intersect_full (filter, ni2src->gst_caps,
      GST_CAPS_INTERSECT_FIRST)
      : gst_caps_ref (ni2src->gst_caps);
}

static gboolean
gst_openni2_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstOpenni2Src *ni2src;

  ni2src = GST_OPENNI2_SRC (src);

  return gst_video_info_from_caps (&ni2src->info, caps);
}

static GstStateChangeReturn
gst_openni2_src_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret = GST_STATE_CHANGE_FAILURE;
  GstOpenni2Src *src = GST_OPENNI2_SRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      /* Open the device or recording and create the streams */
      if (!openni2_initialise_devices (src))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    return ret;
  }

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_NULL:
      gst_openni2_src_stop (GST_BASE_SRC (src));
      if (src->gst_caps) {
        gst_caps_unref (src->gst_caps);
        src->gst_caps = NULL;
      }
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      src->oni_start_ts = GST_CLOCK_TIME_NONE;
      break;
    default:
      break;
  }

  return ret;
}


static GstFlowReturn
gst_openni2src_fill (GstPushSrc * src, GstBuffer * buf)
{
  GstOpenni2Src *ni2src = GST_OPENNI2_SRC (src);
  return openni2_read_gstbuffer (ni2src, buf);
}

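/* Propose a GstVideoBufferPool for downstream allocations, reusing the pool
 * offered in the allocation query when there is one, and enabling
 * GstVideoMeta when downstream supports it. */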
static gboolean
gst_openni2src_decide_allocation (GstBaseSrc * bsrc, GstQuery * query)
{
  GstBufferPool *pool;
  guint size, min, max;
  gboolean update;
  GstStructure *config;
  GstCaps *caps;
  GstVideoInfo info;

  gst_query_parse_allocation (query, &caps, NULL);
  gst_video_info_from_caps (&info, caps);

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    update = TRUE;
  } else {
    pool = NULL;
    min = max = 0;
    size = info.size;
    update = FALSE;
  }

  GST_DEBUG_OBJECT (bsrc, "allocation: size:%u min:%u max:%u pool:%"
      GST_PTR_FORMAT " caps:%" GST_PTR_FORMAT, size, min, max, pool, caps);

  if (!pool)
    pool = gst_video_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);

  if (gst_query_find_allocation_meta (query, GST_VIDEO_META_API_TYPE, NULL)) {
    GST_DEBUG_OBJECT (pool, "activate Video Meta");
    gst_buffer_pool_config_add_option (config,
        GST_BUFFER_POOL_OPTION_VIDEO_META);
  }

  gst_buffer_pool_set_config (pool, config);

  if (update)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  gst_object_unref (pool);

  return GST_BASE_SRC_CLASS (parent_class)->decide_allocation (bsrc, query);
}

static gboolean
openni2_initialise_library (void)
{
  openni::Status rc = openni::STATUS_OK;
  rc = openni::OpenNI::initialize ();
  if (rc != openni::STATUS_OK) {
    GST_ERROR ("Initialization failed: %s",
        openni::OpenNI::getExtendedError ());
    openni::OpenNI::shutdown ();
    return FALSE;
  }
  return (rc == openni::STATUS_OK);
}

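/* Open the device (or .oni recording) given by the location property, or
 * openni::ANY_DEVICE when none is set, create whichever of the depth and
 * color streams are available, and cache resolution, framerate and pixel
 * formats for caps negotiation. */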
static gboolean
openni2_initialise_devices (GstOpenni2Src * src)
{
  openni::Status rc = openni::STATUS_OK;
  const char *deviceURI = openni::ANY_DEVICE;

  if (src->uri_name)
    deviceURI = src->uri_name;

  rc = src->device->open (deviceURI);
  if (rc != openni::STATUS_OK) {
    GST_ERROR_OBJECT (src, "Device (%s) open failed: %s", deviceURI,
        openni::OpenNI::getExtendedError ());
    openni::OpenNI::shutdown ();
    return FALSE;
  }

  /* depth sensor */
  rc = src->depth->create (*src->device, openni::SENSOR_DEPTH);
  if (rc == openni::STATUS_OK) {
    rc = src->depth->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start depth stream: %s",
          openni::OpenNI::getExtendedError ());
      src->depth->destroy ();
    }
  } else {
    GST_WARNING_OBJECT (src, "Couldn't find depth stream: %s",
        openni::OpenNI::getExtendedError ());
  }

  /* color sensor */
  rc = src->color->create (*src->device, openni::SENSOR_COLOR);
  if (rc == openni::STATUS_OK) {
    rc = src->color->start ();
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Couldn't start color stream: %s",
          openni::OpenNI::getExtendedError ());
      src->color->destroy ();
    }
  } else {
    GST_WARNING_OBJECT (src, "Couldn't find color stream: %s",
        openni::OpenNI::getExtendedError ());
  }

  if (!src->depth->isValid () && !src->color->isValid ()) {
    GST_ERROR_OBJECT (src, "No valid streams. Exiting");
    openni::OpenNI::shutdown ();
    return FALSE;
  }

  /* Get the resolution and make sure it is valid */
  if (src->depth->isValid () && src->color->isValid ()) {
    src->depthVideoMode = src->depth->getVideoMode ();
    src->colorVideoMode = src->color->getVideoMode ();

    int depthWidth = src->depthVideoMode.getResolutionX ();
    int depthHeight = src->depthVideoMode.getResolutionY ();
    int colorWidth = src->colorVideoMode.getResolutionX ();
    int colorHeight = src->colorVideoMode.getResolutionY ();

    if (depthWidth == colorWidth && depthHeight == colorHeight) {
      src->width = depthWidth;
      src->height = depthHeight;
      src->fps = src->depthVideoMode.getFps ();
      src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
      src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
    } else {
      GST_ERROR_OBJECT (src, "Error - expected color and depth streams to"
          " have the same resolution: D: %dx%d vs C: %dx%d",
          depthWidth, depthHeight, colorWidth, colorHeight);
      return FALSE;
    }
    GST_INFO_OBJECT (src, "DEPTH&COLOR resolution: %dx%d",
        src->width, src->height);
  } else if (src->depth->isValid ()) {
    src->depthVideoMode = src->depth->getVideoMode ();
    src->width = src->depthVideoMode.getResolutionX ();
    src->height = src->depthVideoMode.getResolutionY ();
    src->fps = src->depthVideoMode.getFps ();
    src->depthpixfmt = src->depthVideoMode.getPixelFormat ();
    GST_INFO_OBJECT (src, "DEPTH resolution: %dx%d", src->width, src->height);
  } else if (src->color->isValid ()) {
    src->colorVideoMode = src->color->getVideoMode ();
    src->width = src->colorVideoMode.getResolutionX ();
    src->height = src->colorVideoMode.getResolutionY ();
    src->fps = src->colorVideoMode.getFps ();
    src->colorpixfmt = src->colorVideoMode.getPixelFormat ();
    GST_INFO_OBJECT (src, "COLOR resolution: %dx%d", src->width, src->height);
  } else {
    GST_ERROR_OBJECT (src, "Expected at least one of the streams to be valid.");
    return FALSE;
  }

  return TRUE;
}

static GstFlowReturn
openni2_read_gstbuffer (GstOpenni2Src * src, GstBuffer * buf)
{
  openni::Status rc = openni::STATUS_OK;
  openni::VideoStream * pStream = src->depth;
  int changedStreamDummy;
  GstVideoFrame vframe;
  uint64_t oni_ts;

  /* Block until we get some data */
  rc = openni::OpenNI::waitForAnyStream (&pStream, 1, &changedStreamDummy,
      SAMPLE_READ_WAIT_TIMEOUT);
  if (rc != openni::STATUS_OK) {
    GST_ERROR_OBJECT (src, "Frame read timeout: %s",
        openni::OpenNI::getExtendedError ());
    return GST_FLOW_ERROR;
  }

  if (src->depth->isValid () && src->color->isValid () &&
      src->sourcetype == SOURCETYPE_BOTH) {
    rc = src->depth->readFrame (src->depthFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }
    rc = src->color->readFrame (src->colorFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    /* Copy colour information */
    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint8 *pColor = (guint8 *) src->colorFrame->getData ();
    /* Add depth as 8bit alpha channel, depth is 16bit samples. */
    guint16 *pDepth = (guint16 *) src->depthFrame->getData ();

    for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
      for (int j = 0; j < src->colorFrame->getWidth (); ++j) {
        pData[4 * j + 0] = pColor[3 * j + 0];
        pData[4 * j + 1] = pColor[3 * j + 1];
        pData[4 * j + 2] = pColor[3 * j + 2];
        pData[4 * j + 3] = pDepth[j] >> 8;
      }
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
      pColor += src->colorFrame->getStrideInBytes ();
      pDepth += src->depthFrame->getStrideInBytes () / 2;
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->colorFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%d+%d)B",
        src->colorFrame->getDataSize (),
        src->depthFrame->getDataSize ());
  } else if (src->depth->isValid () && src->sourcetype == SOURCETYPE_DEPTH) {
    rc = src->depth->readFrame (src->depthFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    /* Copy depth information */
    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint16 *pData = (guint16 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint16 *pDepth = (guint16 *) src->depthFrame->getData ();

    for (int i = 0; i < src->depthFrame->getHeight (); ++i) {
      memcpy (pData, pDepth, 2 * src->depthFrame->getWidth ());
      pDepth += src->depthFrame->getStrideInBytes () / 2;
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0) / 2;
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->depthFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB",
        src->depthFrame->getWidth (),
        src->depthFrame->getHeight (),
        src->depthFrame->getDataSize ());
  } else if (src->color->isValid () && src->sourcetype == SOURCETYPE_COLOR) {
    rc = src->color->readFrame (src->colorFrame);
    if (rc != openni::STATUS_OK) {
      GST_ERROR_OBJECT (src, "Frame read error: %s",
          openni::OpenNI::getExtendedError ());
      return GST_FLOW_ERROR;
    }

    gst_video_frame_map (&vframe, &src->info, buf, GST_MAP_WRITE);

    guint8 *pData = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, 0);
    guint8 *pColor = (guint8 *) src->colorFrame->getData ();

    for (int i = 0; i < src->colorFrame->getHeight (); ++i) {
      memcpy (pData, pColor, 3 * src->colorFrame->getWidth ());
      pColor += src->colorFrame->getStrideInBytes ();
      pData += GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0);
    }
    gst_video_frame_unmap (&vframe);

    oni_ts = src->colorFrame->getTimestamp () * 1000;

    GST_LOG_OBJECT (src, "sending buffer (%dx%d)=%dB",
        src->colorFrame->getWidth (),
        src->colorFrame->getHeight (),
        src->colorFrame->getDataSize ());
  } else {
    g_return_val_if_reached (GST_FLOW_ERROR);
    return GST_FLOW_ERROR;
  }

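  /* OpenNI frame timestamps are in microseconds (hence the * 1000 above to
   * get nanoseconds); rebase them on the first observed timestamp so the
   * stream's PTS starts at zero. */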
  if (G_UNLIKELY (src->oni_start_ts == GST_CLOCK_TIME_NONE))
    src->oni_start_ts = oni_ts;

  GST_BUFFER_PTS (buf) = oni_ts - src->oni_start_ts;

  GST_LOG_OBJECT (src, "Calculated PTS as %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_PTS (buf)));

  return GST_FLOW_OK;
}