diff --git a/modules/videoio/include/opencv2/videoio.hpp b/modules/videoio/include/opencv2/videoio.hpp index 8610fe3e8c..444bb890af 100644 --- a/modules/videoio/include/opencv2/videoio.hpp +++ b/modules/videoio/include/opencv2/videoio.hpp @@ -86,7 +86,8 @@ enum { CAP_ANY = 0, // autodetect CAP_GIGANETIX = 1300, // Smartek Giganetix GigEVisionSDK CAP_MSMF = 1400, // Microsoft Media Foundation (via videoInput) CAP_INTELPERC = 1500, // Intel Perceptual Computing SDK - CAP_OPENNI2 = 1600 // OpenNI2 (for Kinect) + CAP_OPENNI2 = 1600, // OpenNI2 (for Kinect) + CAP_OPENNI2_ASUS = 1610 // OpenNI2 (for Asus Xtion and Occipital Structure sensors) }; // generic properties (based on DC1394 properties) diff --git a/modules/videoio/src/cap_openni2.cpp b/modules/videoio/src/cap_openni2.cpp index 49ccdea876..92d9db8f26 100644 --- a/modules/videoio/src/cap_openni2.cpp +++ b/modules/videoio/src/cap_openni2.cpp @@ -68,8 +68,6 @@ #define CV_DEPTH_STREAM 0 #define CV_COLOR_STREAM 1 -#define CV_NUM_STREAMS 2 - #include "OpenNI.h" #include "PS1080.h" @@ -161,6 +159,7 @@ protected: int currentStream; + int numStream; std::vector<OutputMap> outputMaps; }; @@ -198,6 +197,7 @@ openni::VideoMode CvCapture_OpenNI2::defaultDepthOutputMode() CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) { + numStream = 2; const char* deviceURI = openni::ANY_DEVICE; openni::Status status; int deviceType = DEVICE_DEFAULT; @@ -215,6 +215,10 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) index %= 10; } + // Asus XTION and Occipital Structure Sensor do not have an image generator + if (deviceType == DEVICE_ASUS_XTION) + numStream = 1; + if( deviceType > DEVICE_MAX ) return; @@ -259,6 +263,10 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find depth stream:: %s\n", openni::OpenNI::getExtendedError())); return; } + + streams = new openni::VideoStream*[numStream]; + streams[CV_DEPTH_STREAM] = &depth; + // create a color object status
= color.create(device, openni::SENSOR_COLOR); if (status == openni::STATUS_OK) @@ -275,13 +283,19 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) color.destroy(); return; } + streams[CV_COLOR_STREAM] = &color; } - else + else if (numStream == 2) { CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Couldn't find color stream: %s\n", openni::OpenNI::getExtendedError())); return; } + if( !readCamerasParams() ) + { + CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); + return; + } // if( deviceType == DEVICE_ASUS_XTION ) // { @@ -291,14 +305,6 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) // depthGenerator.SetIntProperty("RegistrationType", 1 /*XN_PROCESSING_HARDWARE*/); // } - if( !readCamerasParams() ) - { - CV_Error(CV_StsError, cv::format("CvCapture_OpenNI2::CvCapture_OpenNI2 : Could not read cameras parameters\n")); - return; - } - streams = new openni::VideoStream*[CV_NUM_STREAMS]; - streams[CV_DEPTH_STREAM] = &depth; - streams[CV_COLOR_STREAM] = &color; outputMaps.resize( outputMapsTypesCount ); @@ -309,6 +315,7 @@ CvCapture_OpenNI2::CvCapture_OpenNI2( int index ) CvCapture_OpenNI2::CvCapture_OpenNI2(const char * filename) { + numStream = 2; openni::Status status; isContextOpened = false; @@ -695,7 +702,7 @@ bool CvCapture_OpenNI2::grabFrame() bool isGrabbed = false; - openni::Status status = openni::OpenNI::waitForAnyStream(streams, CV_NUM_STREAMS, &currentStream, CV_STREAM_TIMEOUT); + openni::Status status = openni::OpenNI::waitForAnyStream(streams, numStream, &currentStream, CV_STREAM_TIMEOUT); if( status != openni::STATUS_OK ) return false;