OCV_OPTION(WITH_OPENCL "Include OpenCL Runtime support" ON IF (NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDFFT "Include AMD OpenCL FFT library support" ON IF (NOT ANDROID AND NOT IOS) )
OCV_OPTION(WITH_OPENCLAMDBLAS "Include AMD OpenCL BLAS library support" ON IF (NOT ANDROID AND NOT IOS) )
-
+# Intel Perceptual Computing SDK camera support; Windows-only (the SDK ships only Win32/x64 libraries), off by default.
+OCV_OPTION(WITH_INTELPERC "Include Intel Perceptual Computing support" OFF IF WIN32 )
# OpenCV build components
# ===================================================
status(" Xine:" HAVE_XINE THEN "YES (ver ${ALIASOF_libxine_VERSION})" ELSE NO)
endif(DEFINED WITH_XINE)
+# Report Intel PerC detection result in the configuration summary,
+# but only when the option exists on this platform (it is WIN32-only).
+if(DEFINED WITH_INTELPERC)
+  status(" Intel PerC:" HAVE_INTELPERC THEN "YES" ELSE NO)
+endif(DEFINED WITH_INTELPERC)
+
+
# ========================== Other third-party libraries ==========================
status("")
status(" Other third-party libraries:")
--- /dev/null
+# Main variables:
+# INTELPERC_LIBRARY and INTELPERC_INCLUDES to link Intel Perceptual Computing SDK modules
+# HAVE_INTELPERC for conditional compilation OpenCV with/without Intel Perceptual Computing SDK
+
+# If the user edited the cached directory since the last run, drop the
+# cached find results so the search below runs again.
+if(NOT "${INTELPERC_LIB_DIR}" STREQUAL "${INTELPERC_LIB_DIR_INTERNAL}")
+  unset(INTELPERC_LIBRARY CACHE)
+  unset(INTELPERC_LIB_DIR CACHE)
+endif()
+
+if(NOT "${INTELPERC_INCLUDE_DIR}" STREQUAL "${INTELPERC_INCLUDE_DIR_INTERNAL}")
+  unset(INTELPERC_INCLUDES CACHE)
+  unset(INTELPERC_INCLUDE_DIR CACHE)
+endif()
+
+# The SDK installer exports PCSDK_DIR; search its include/lib subfolders.
+# A "/" is inserted explicitly so the result is correct whether or not the
+# environment variable carries a trailing separator (doubled slashes are
+# harmless to find_file/find_library).
+if(WIN32)
+  if(NOT (MSVC64 OR MINGW64))
+    find_file(INTELPERC_INCLUDES "pxcsession.h" PATHS "$ENV{PCSDK_DIR}/include" DOC "Intel Perceptual Computing SDK interface header")
+    find_library(INTELPERC_LIBRARY "libpxc.lib" PATHS "$ENV{PCSDK_DIR}/lib/Win32" DOC "Intel Perceptual Computing SDK library")
+  else()
+    find_file(INTELPERC_INCLUDES "pxcsession.h" PATHS "$ENV{PCSDK_DIR}/include" DOC "Intel Perceptual Computing SDK interface header")
+    find_library(INTELPERC_LIBRARY "libpxc.lib" PATHS "$ENV{PCSDK_DIR}/lib/x64" DOC "Intel Perceptual Computing SDK library")
+  endif()
+endif()
+
+if(INTELPERC_LIBRARY AND INTELPERC_INCLUDES)
+  set(HAVE_INTELPERC TRUE)
+endif()
+
+get_filename_component(INTELPERC_LIB_DIR "${INTELPERC_LIBRARY}" PATH)
+get_filename_component(INTELPERC_INCLUDE_DIR "${INTELPERC_INCLUDES}" PATH)
+
+if(HAVE_INTELPERC)
+  set(INTELPERC_LIB_DIR "${INTELPERC_LIB_DIR}" CACHE PATH "Path to Intel Perceptual Computing SDK interface libraries" FORCE)
+  set(INTELPERC_INCLUDE_DIR "${INTELPERC_INCLUDE_DIR}" CACHE PATH "Path to Intel Perceptual Computing SDK interface headers" FORCE)
+endif()
+
+# Remember what was found so the unset() logic above can detect user edits.
+# NOTE: message() takes the mode keyword WITHOUT a following comma; the
+# original "message( WARNING, ...)" made "WARNING," part of the text and
+# the message lost its warning severity.
+if(INTELPERC_LIBRARY)
+  set(INTELPERC_LIB_DIR_INTERNAL "${INTELPERC_LIB_DIR}" CACHE INTERNAL "This is the value of the last time INTELPERC_LIB_DIR was set successfully." FORCE)
+else()
+  message(WARNING "Intel Perceptual Computing SDK library directory (set by INTELPERC_LIB_DIR variable) is not found or does not have Intel Perceptual Computing SDK libraries.")
+endif()
+
+if(INTELPERC_INCLUDES)
+  set(INTELPERC_INCLUDE_DIR_INTERNAL "${INTELPERC_INCLUDE_DIR}" CACHE INTERNAL "This is the value of the last time INTELPERC_INCLUDE_DIR was set successfully." FORCE)
+else()
+  message(WARNING "Intel Perceptual Computing SDK include directory (set by INTELPERC_INCLUDE_DIR variable) is not found or does not have Intel Perceptual Computing SDK include files.")
+endif()
+
+mark_as_advanced(FORCE INTELPERC_LIBRARY)
+mark_as_advanced(FORCE INTELPERC_INCLUDES)
+
set(HAVE_QTKIT YES)
endif()
endif()
+
+# --- Intel Perceptual Computing SDK ---
+ocv_clear_vars(HAVE_INTELPERC)
+if(WITH_INTELPERC)
+  include("${OpenCV_SOURCE_DIR}/cmake/OpenCVFindIntelPerCSDK.cmake")
+endif(WITH_INTELPERC)
/* Xine video library */
#cmakedefine HAVE_XINE
+/* Intel Perceptual Computing SDK library */
+#cmakedefine HAVE_INTELPERC
+
/* Define to 1 if your processor stores words with the most significant byte
first (like Motorola and SPARC, unlike Intel and VAX). */
#cmakedefine WORDS_BIGENDIAN
list(APPEND HIGHGUI_LIBRARIES "-framework QTKit" "-framework QuartzCore" "-framework AppKit")
endif()
+# Intel PerC capture backend: compile the backend source and wire the SDK
+# headers/library into the highgui module.
+if(HAVE_INTELPERC)
+  list(APPEND highgui_srcs src/cap_intelperc.cpp)
+  ocv_include_directories(${INTELPERC_INCLUDE_DIR})
+  list(APPEND HIGHGUI_LIBRARIES ${INTELPERC_LIBRARY})
+endif(HAVE_INTELPERC)
+
if(IOS)
add_definitions(-DHAVE_IOS=1)
list(APPEND highgui_srcs src/ios_conversions.mm src/cap_ios_abstract_camera.mm src/cap_ios_photo_camera.mm src/cap_ios_video_camera.mm)
CV_CAP_AVFOUNDATION = 1200, // AVFoundation framework for iOS (OS X Lion will have the same API)
- CV_CAP_GIGANETIX = 1300 // Smartek Giganetix GigEVisionSDK
+ CV_CAP_GIGANETIX = 1300, // Smartek Giganetix GigEVisionSDK
+
+ CV_CAP_INTELPERC = 1500 // Intel Perceptual Computing SDK
};
/* start capturing frames from camera: index = camera_index + domain_offset (CV_CAP_*) */
CV_CAP_PROP_GIGA_FRAME_HEIGH_MAX = 10004,
CV_CAP_PROP_GIGA_FRAME_SENS_WIDTH = 10005,
CV_CAP_PROP_GIGA_FRAME_SENS_HEIGH = 10006
+
+ ,CV_CAP_PROP_INTELPERC_PROFILE_COUNT = 11001,
+ CV_CAP_PROP_INTELPERC_PROFILE_IDX = 11002,
+ CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE = 11003,
+ CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE = 11004,
+ CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD = 11005,
+ CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ = 11006,
+ CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT = 11007,
+
+ // Intel PerC streams
+ CV_CAP_INTELPERC_DEPTH_STREAM = 1 << 31,
+ CV_CAP_INTELPERC_IMAGE_STREAM = 1 << 30,
+ CV_CAP_INTELPERC_STREAMS_MASK = CV_CAP_INTELPERC_DEPTH_STREAM + CV_CAP_INTELPERC_IMAGE_STREAM,
};
enum
CV_CAP_ANDROID_ANTIBANDING_OFF
};
+/* Output channel selectors for CvCapture_IntelPerC::retrieveFrame()
+   (cvRetrieveFrame's second argument). Note: no trailing comma after the
+   last enumerator -- this is a public C header and a trailing comma is
+   invalid in C89 and C++03. */
+enum
+{
+    CV_CAP_INTELPERC_DEPTH_MAP   = 0, // Each pixel is a 16-bit integer. The value indicates the distance from an object to the camera's XY plane or the Cartesian depth.
+    CV_CAP_INTELPERC_UVDEPTH_MAP = 1, // Each pixel contains two 32-bit floating point values in the range of 0-1, representing the mapping of depth coordinates to the color coordinates.
+    CV_CAP_INTELPERC_IR_MAP      = 2, // Each pixel is a 16-bit integer. The value indicates the intensity of the reflected laser beam.
+    CV_CAP_INTELPERC_IMAGE       = 3
+};
+
/* retrieve or set capture properties */
CVAPI(double) cvGetCaptureProperty( CvCapture* capture, int property_id );
CVAPI(int) cvSetCaptureProperty( CvCapture* capture, int property_id, double value );
#ifdef HAVE_GIGE_API
CV_CAP_GIGANETIX,
#endif
+#ifdef HAVE_INTELPERC
+ CV_CAP_INTELPERC,
+#endif
-1
};
defined(HAVE_AVFOUNDATION) || \
defined(HAVE_ANDROID_NATIVE_CAMERA) || \
defined(HAVE_GIGE_API) || \
+ defined(HAVE_INTELPERC) || \
(0)
// local variable to memorize the captured device
CvCapture *capture;
return capture;
break; // CV_CAP_GIGANETIX
#endif
+
+#ifdef HAVE_INTELPERC
+ case CV_CAP_INTELPERC:
+ capture = cvCreateCameraCapture_IntelPerC(index);
+ if (capture)
+ return capture;
+ break; // CV_CAP_INTEL_PERC
+#endif
}
}
--- /dev/null
+#include "precomp.hpp"
+
+#ifdef HAVE_INTELPERC
+
+#if defined TBB_INTERFACE_VERSION && TBB_INTERFACE_VERSION < 5000
+# undef HAVE_TBB
+#endif
+
+#include "pxcsession.h"
+#include "pxcsmartptr.h"
+#include "pxccapture.h"
+
+// Common machinery for one Intel PerC SDK video stream (color or depth):
+// owns the SDK capture device and stream, enumerates stream profiles, grabs
+// frames, and tracks a capture-relative timestamp / frame counter.
+class CvIntelPerCStreamBase
+{
+protected:
+    // Frame storage: a cv::Mat holding the pixel data plus an IplImage header
+    // aliasing it, so retrieveFrame() can return a non-owning IplImage*.
+    struct FrameInternal
+    {
+        IplImage* retrieveFrame()
+        {
+            if (m_mat.empty())
+                return NULL;
+            // Rebuild the header each call; it only references m_mat's data.
+            m_iplHeader = IplImage(m_mat);
+            return &m_iplHeader;
+        }
+        cv::Mat m_mat;
+    private:
+        IplImage m_iplHeader;
+    };
+public:
+    CvIntelPerCStreamBase()
+        : m_profileIdx(-1)       // -1 = no profile selected yet
+        , m_frameIdx(0)
+        , m_timeStampStartNS(0)  // 0 = baseline not captured yet
+    {
+    }
+    virtual ~CvIntelPerCStreamBase()
+    {
+    }
+
+    // True when both the device and the stream were successfully created.
+    bool isValid()
+    {
+        return (m_device.IsValid() && m_stream.IsValid());
+    }
+    // Reads one frame synchronously; lazily selects profile 0 on first use.
+    // Returns false if the stream is missing or any SDK call fails.
+    bool grabFrame()
+    {
+        if (!m_stream.IsValid())
+            return false;
+        if (-1 == m_profileIdx)
+        {
+            // Default to the first enumerated profile.
+            if (!setProperty(CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0))
+                return false;
+        }
+        PXCSmartPtr<PXCImage> pxcImage;
+        PXCSmartSP sp;
+        if (PXC_STATUS_NO_ERROR > m_stream->ReadStreamAsync(&pxcImage, &sp))
+            return false;
+        if (PXC_STATUS_NO_ERROR > sp->Synchronize())
+            return false;
+        // First grabbed frame defines the timestamp origin.
+        if (0 == m_timeStampStartNS)
+            m_timeStampStartNS = pxcImage->QueryTimeStamp();
+        // NOTE(review): divisor 10000 suggests SDK timestamps are in 100ns
+        // units and m_timeStamp is milliseconds -- confirm against
+        // PXCImage::QueryTimeStamp documentation.
+        m_timeStamp = (double)((pxcImage->QueryTimeStamp() - m_timeStampStartNS) / 10000);
+        m_frameIdx++;
+        return prepareIplImage(pxcImage);
+    }
+    // Currently selected profile index, or -1 before the first grab/set.
+    int getProfileIDX() const
+    {
+        return m_profileIdx;
+    }
+public:
+    // Binds the concrete stream type (color/depth); implemented by subclasses.
+    virtual bool initStream(PXCSession *session) = 0;
+    // Generic per-stream properties; subclasses handle their specific ones
+    // first and fall back to this implementation.
+    virtual double getProperty(int propIdx)
+    {
+        double ret = 0.0;
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_INTELPERC_PROFILE_COUNT:
+            ret = (double)m_profiles.size();
+            break;
+        case CV_CAP_PROP_FRAME_WIDTH :
+            // NOTE(review): int vs size_t comparison below triggers
+            // signed/unsigned warnings on most compilers.
+            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
+                ret = (double)m_profiles[m_profileIdx].imageInfo.width;
+            break;
+        case CV_CAP_PROP_FRAME_HEIGHT :
+            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
+                ret = (double)m_profiles[m_profileIdx].imageInfo.height;
+            break;
+        case CV_CAP_PROP_FPS :
+            if ((0 <= m_profileIdx) && (m_profileIdx < m_profiles.size()))
+            {
+                // Midpoint of the profile's min and max frame rates.
+                ret = ((double)m_profiles[m_profileIdx].frameRateMin.numerator / (double)m_profiles[m_profileIdx].frameRateMin.denominator
+                       + (double)m_profiles[m_profileIdx].frameRateMax.numerator / (double)m_profiles[m_profileIdx].frameRateMax.denominator) / 2.0;
+            }
+            break;
+        case CV_CAP_PROP_POS_FRAMES:
+            ret = (double)m_frameIdx;
+            break;
+        case CV_CAP_PROP_POS_MSEC:
+            ret = m_timeStamp;
+            break;
+        };
+        return ret;
+    }
+    // Selecting a profile resets the frame counter and timestamp origin.
+    virtual bool setProperty(int propIdx, double propVal)
+    {
+        bool isSet = false;
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_INTELPERC_PROFILE_IDX:
+            {
+                int propValInt = (int)propVal;
+                if ((0 <= propValInt) && (propValInt < m_profiles.size()))
+                {
+                    if (m_profileIdx != propValInt)
+                    {
+                        m_profileIdx = propValInt;
+                        if (m_stream.IsValid())
+                            m_stream->SetProfile(&m_profiles[m_profileIdx]);
+                        m_frameIdx = 0;
+                        m_timeStampStartNS = 0;
+                    }
+                    isSet = true;
+                }
+            }
+            break;
+        };
+        return isSet;
+    }
+protected:
+    PXCSmartPtr<PXCCapture::Device> m_device;
+    // Walks the session's sensor/video-capture implementations and takes the
+    // first device that can be created. Returns false when none is found.
+    bool initDevice(PXCSession *session)
+    {
+        if (NULL == session)
+            return false;
+
+        pxcStatus sts = PXC_STATUS_NO_ERROR;
+        PXCSession::ImplDesc templat;
+        memset(&templat,0,sizeof(templat));
+        templat.group = PXCSession::IMPL_GROUP_SENSOR;
+        templat.subgroup= PXCSession::IMPL_SUBGROUP_VIDEO_CAPTURE;
+
+        for (int modidx = 0; PXC_STATUS_NO_ERROR <= sts; modidx++)
+        {
+            PXCSession::ImplDesc desc;
+            sts = session->QueryImpl(&templat, modidx, &desc);
+            if (PXC_STATUS_NO_ERROR > sts)
+                break;
+
+            PXCSmartPtr<PXCCapture> capture;
+            sts = session->CreateImpl<PXCCapture>(&desc, &capture);
+            if (!capture.IsValid())
+                continue;
+
+            /* enumerate devices */
+            for (int devidx = 0; PXC_STATUS_NO_ERROR <= sts; devidx++)
+            {
+                PXCSmartPtr<PXCCapture::Device> device;
+                sts = capture->CreateDevice(devidx, &device);
+                if (PXC_STATUS_NO_ERROR <= sts)
+                {
+                    // Transfer ownership out of the local smart pointer.
+                    m_device = device.ReleasePtr();
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    PXCSmartPtr<PXCCapture::VideoStream> m_stream;
+    // Picks the first video stream of the requested image type on m_device.
+    // Leaves m_stream invalid (checked by callers) when none matches.
+    void initStreamImpl(PXCImage::ImageType type)
+    {
+        if (!m_device.IsValid())
+            return;
+
+        pxcStatus sts = PXC_STATUS_NO_ERROR;
+        /* enumerate streams */
+        for (int streamidx = 0; PXC_STATUS_NO_ERROR <= sts; streamidx++)
+        {
+            PXCCapture::Device::StreamInfo sinfo;
+            sts = m_device->QueryStream(streamidx, &sinfo);
+            if (PXC_STATUS_NO_ERROR > sts)
+                break;
+            if (PXCCapture::VideoStream::CUID != sinfo.cuid)
+                continue;
+            if (type != sinfo.imageType)
+                continue;
+
+            sts = m_device->CreateStream<PXCCapture::VideoStream>(streamidx, &m_stream);
+            if (PXC_STATUS_NO_ERROR == sts)
+                break;
+            m_stream.ReleaseRef();
+        }
+    }
+protected:
+    std::vector<PXCCapture::VideoStream::ProfileInfo> m_profiles;
+    int m_profileIdx;          // selected profile, -1 = none
+    int m_frameIdx;            // frames grabbed since last profile change
+    pxcU64 m_timeStampStartNS; // timestamp of the first grabbed frame
+    double m_timeStamp;        // last frame's timestamp relative to start
+    // Fills m_profiles with every profile the current stream reports.
+    void enumProfiles()
+    {
+        m_profiles.clear();
+        if (!m_stream.IsValid())
+            return;
+        pxcStatus sts = PXC_STATUS_NO_ERROR;
+        for (int profidx = 0; PXC_STATUS_NO_ERROR <= sts; profidx++)
+        {
+            PXCCapture::VideoStream::ProfileInfo pinfo;
+            sts = m_stream->QueryProfile(profidx, &pinfo);
+            if (PXC_STATUS_NO_ERROR > sts)
+                break;
+            m_profiles.push_back(pinfo);
+        }
+    }
+    // Copies the SDK image into the subclass's FrameInternal storage.
+    virtual bool prepareIplImage(PXCImage *pxcImage) = 0;
+};
+
+// Color (RGB) stream of the Intel PerC camera. Maps the standard
+// CV_CAP_PROP_* image controls onto PXCCapture::Device color properties; all
+// other properties fall through to CvIntelPerCStreamBase.
+class CvIntelPerCStreamImage
+    : public CvIntelPerCStreamBase
+{
+public:
+    CvIntelPerCStreamImage()
+    {
+    }
+    virtual ~CvIntelPerCStreamImage()
+    {
+    }
+
+    // Binds the color video stream and enumerates its profiles.
+    virtual bool initStream(PXCSession *session)
+    {
+        if (!initDevice(session))
+            return false;
+        initStreamImpl(PXCImage::IMAGE_TYPE_COLOR);
+        if (!m_stream.IsValid())
+            return false;
+        enumProfiles();
+        return true;
+    }
+    virtual double getProperty(int propIdx)
+    {
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_BRIGHTNESS:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS);
+        case CV_CAP_PROP_CONTRAST:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST);
+        case CV_CAP_PROP_SATURATION:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION);
+        case CV_CAP_PROP_HUE:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_HUE);
+        case CV_CAP_PROP_GAMMA:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA);
+        case CV_CAP_PROP_SHARPNESS:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS);
+        case CV_CAP_PROP_GAIN:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN);
+        case CV_CAP_PROP_BACKLIGHT:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION);
+        case CV_CAP_PROP_EXPOSURE:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE);
+        //Add image stream specific properties
+        }
+        return CvIntelPerCStreamBase::getProperty(propIdx);
+    }
+    virtual bool setProperty(int propIdx, double propVal)
+    {
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_BRIGHTNESS:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_BRIGHTNESS, propVal);
+        case CV_CAP_PROP_CONTRAST:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_CONTRAST, propVal);
+        case CV_CAP_PROP_SATURATION:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_SATURATION, propVal);
+        case CV_CAP_PROP_HUE:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_HUE, propVal);
+        case CV_CAP_PROP_GAMMA:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_GAMMA, propVal);
+        case CV_CAP_PROP_SHARPNESS:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_SHARPNESS, propVal);
+        case CV_CAP_PROP_GAIN:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_GAIN, propVal);
+        case CV_CAP_PROP_BACKLIGHT:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_BACK_LIGHT_COMPENSATION, propVal);
+        case CV_CAP_PROP_EXPOSURE:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_COLOR_EXPOSURE, propVal);
+        //Add image stream specific properties
+        }
+        return CvIntelPerCStreamBase::setProperty(propIdx, propVal);
+    }
+public:
+    IplImage* retrieveFrame()
+    {
+        return m_frame.retrieveFrame();
+    }
+private:
+    // Returns the device's float-valued property as double, or 0.0 when the
+    // device is missing or the query fails (same behavior as the previous
+    // per-case copies this helper replaces).
+    double queryDeviceProperty(PXCCapture::Device::Property prop)
+    {
+        if (!m_device.IsValid())
+            return 0.0;
+        float fret = 0.0f;
+        if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(prop, &fret))
+            return (double)fret;
+        return 0.0;
+    }
+    // Sets a device property; false when the device is missing or the SDK
+    // rejects the value.
+    bool setDeviceProperty(PXCCapture::Device::Property prop, double propVal)
+    {
+        if (!m_device.IsValid())
+            return false;
+        return (PXC_STATUS_NO_ERROR == m_device->SetProperty(prop, (float)propVal));
+    }
+protected:
+    FrameInternal m_frame;
+    // Copies the grabbed color image (converted to RGB24) into m_frame.
+    // BUGFIX: the original ignored AcquireAccess failures and leaked the
+    // access handle (no ReleaseAccess) on the surface-type early return.
+    bool prepareIplImage(PXCImage *pxcImage)
+    {
+        if (NULL == pxcImage)
+            return false;
+        PXCImage::ImageInfo info;
+        pxcImage->QueryInfo(&info);
+
+        PXCImage::ImageData data;
+        if (PXC_STATUS_NO_ERROR > pxcImage->AcquireAccess(PXCImage::ACCESS_READ, PXCImage::COLOR_FORMAT_RGB24, &data))
+            return false;
+        if (PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type)
+        {
+            pxcImage->ReleaseAccess(&data);
+            return false;
+        }
+
+        // Wrap the SDK buffer without copying, then deep-copy into m_frame
+        // so the data outlives ReleaseAccess.
+        cv::Mat temp(info.height, info.width, CV_8UC3, data.planes[0], data.pitches[0]);
+        temp.copyTo(m_frame.m_mat);
+
+        pxcImage->ReleaseAccess(&data);
+        return true;
+    }
+};
+
+// Depth stream of the Intel PerC camera. One grabbed SDK image carries three
+// planes: depth map, IR map and depth-to-color UV map, exposed through the
+// three retrieve*Frame() accessors.
+class CvIntelPerCStreamDepth
+    : public CvIntelPerCStreamBase
+{
+public:
+    CvIntelPerCStreamDepth()
+    {
+    }
+    virtual ~CvIntelPerCStreamDepth()
+    {
+    }
+
+    // Binds the depth video stream and enumerates its profiles.
+    virtual bool initStream(PXCSession *session)
+    {
+        if (!initDevice(session))
+            return false;
+        initStreamImpl(PXCImage::IMAGE_TYPE_DEPTH);
+        if (!m_stream.IsValid())
+            return false;
+        enumProfiles();
+        return true;
+    }
+    virtual double getProperty(int propIdx)
+    {
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE);
+        case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE);
+        case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD:
+            return queryDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD);
+        case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ:
+            {
+                // Focal length is reported as an (x, y) point; x = horizontal.
+                if (!m_device.IsValid())
+                    return 0.0;
+                PXCPointF32 ptf;
+                if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf))
+                    return (double)ptf.x;
+                return 0.0;
+            }
+        case CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT:
+            {
+                if (!m_device.IsValid())
+                    return 0.0;
+                PXCPointF32 ptf;
+                if (PXC_STATUS_NO_ERROR == m_device->QueryPropertyAsPoint(PXCCapture::Device::PROPERTY_DEPTH_FOCAL_LENGTH, &ptf))
+                    return (double)ptf.y;
+                return 0.0;
+            }
+        //Add depth stream specific properties
+        }
+        return CvIntelPerCStreamBase::getProperty(propIdx);
+    }
+    virtual bool setProperty(int propIdx, double propVal)
+    {
+        switch (propIdx)
+        {
+        case CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_LOW_CONFIDENCE_VALUE, propVal);
+        case CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_SATURATION_VALUE, propVal);
+        case CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD:
+            return setDeviceProperty(PXCCapture::Device::PROPERTY_DEPTH_CONFIDENCE_THRESHOLD, propVal);
+        //Add depth stream specific properties
+        }
+        return CvIntelPerCStreamBase::setProperty(propIdx, propVal);
+    }
+public:
+    IplImage* retrieveDepthFrame()
+    {
+        return m_frameDepth.retrieveFrame();
+    }
+    IplImage* retrieveIRFrame()
+    {
+        return m_frameIR.retrieveFrame();
+    }
+    IplImage* retrieveUVFrame()
+    {
+        return m_frameUV.retrieveFrame();
+    }
+private:
+    // Returns the device's float-valued property as double, or 0.0 when the
+    // device is missing or the query fails (same behavior as the previous
+    // per-case copies this helper replaces).
+    double queryDeviceProperty(PXCCapture::Device::Property prop)
+    {
+        if (!m_device.IsValid())
+            return 0.0;
+        float fret = 0.0f;
+        if (PXC_STATUS_NO_ERROR == m_device->QueryProperty(prop, &fret))
+            return (double)fret;
+        return 0.0;
+    }
+    // Sets a device property; false when the device is missing or the SDK
+    // rejects the value.
+    bool setDeviceProperty(PXCCapture::Device::Property prop, double propVal)
+    {
+        if (!m_device.IsValid())
+            return false;
+        return (PXC_STATUS_NO_ERROR == m_device->SetProperty(prop, (float)propVal));
+    }
+protected:
+    FrameInternal m_frameDepth;
+    FrameInternal m_frameIR;
+    FrameInternal m_frameUV;
+
+    // Copies all three planes of the grabbed depth image into local storage.
+    // BUGFIX: the original ignored AcquireAccess failures and leaked the
+    // access handle (no ReleaseAccess) on the type/format early returns.
+    bool prepareIplImage(PXCImage *pxcImage)
+    {
+        if (NULL == pxcImage)
+            return false;
+        PXCImage::ImageInfo info;
+        pxcImage->QueryInfo(&info);
+
+        PXCImage::ImageData data;
+        if (PXC_STATUS_NO_ERROR > pxcImage->AcquireAccess(PXCImage::ACCESS_READ, &data))
+            return false;
+        if ((PXCImage::SURFACE_TYPE_SYSTEM_MEMORY != data.type)
+            || (PXCImage::COLOR_FORMAT_DEPTH != data.format))
+        {
+            pxcImage->ReleaseAccess(&data);
+            return false;
+        }
+
+        // Plane 0: depth, plane 1: IR (both 16-bit), plane 2: UV (2x float).
+        // NOTE(review): CV_16SC1 kept from the original; the header comments
+        // describe plain 16-bit integers -- confirm signedness vs. the SDK.
+        {
+            cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[0], data.pitches[0]);
+            temp.copyTo(m_frameDepth.m_mat);
+        }
+        {
+            cv::Mat temp(info.height, info.width, CV_16SC1, data.planes[1], data.pitches[1]);
+            temp.copyTo(m_frameIR.m_mat);
+        }
+        {
+            cv::Mat temp(info.height, info.width, CV_32FC2, data.planes[2], data.pitches[2]);
+            temp.copyTo(m_frameUV.m_mat);
+        }
+
+        pxcImage->ReleaseAccess(&data);
+        return true;
+    }
+};
+
+///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
+// CvCapture backend combining one color and one depth PerC stream.
+// Property ids carry a stream selector in their top bits
+// (CV_CAP_INTELPERC_STREAMS_MASK); the masked-out remainder is forwarded to
+// the selected stream object.
+class CvCapture_IntelPerC : public CvCapture
+{
+public:
+    CvCapture_IntelPerC(int /*index*/)
+        : m_contextOpened(false)
+    {
+        pxcStatus sts = PXCSession_Create(&m_session);
+        if (PXC_STATUS_NO_ERROR > sts)
+            return;
+        // Both the color and the depth stream must initialize for the
+        // capture to count as opened.
+        m_contextOpened = m_imageStream.initStream(m_session);
+        m_contextOpened &= m_depthStream.initStream(m_session);
+    }
+    virtual ~CvCapture_IntelPerC(){}
+
+    // Routes the property to the image or depth stream per the stream bits;
+    // returns 0 when neither stream is selected.
+    virtual double getProperty(int propIdx)
+    {
+        double propValue = 0;
+        int purePropIdx = propIdx & ~CV_CAP_INTELPERC_STREAMS_MASK;
+        if (CV_CAP_INTELPERC_IMAGE_STREAM == (propIdx & CV_CAP_INTELPERC_STREAMS_MASK))
+        {
+            propValue = m_imageStream.getProperty(purePropIdx);
+        }
+        else if (CV_CAP_INTELPERC_DEPTH_STREAM == (propIdx & CV_CAP_INTELPERC_STREAMS_MASK))
+        {
+            propValue = m_depthStream.getProperty(purePropIdx);
+        }
+        return propValue;
+    }
+    virtual bool setProperty(int propIdx, double propVal)
+    {
+        bool isSet = false;
+        int purePropIdx = propIdx & ~CV_CAP_INTELPERC_STREAMS_MASK;
+        if (CV_CAP_INTELPERC_IMAGE_STREAM == (propIdx & CV_CAP_INTELPERC_STREAMS_MASK))
+        {
+            isSet = m_imageStream.setProperty(purePropIdx, propVal);
+        }
+        else if (CV_CAP_INTELPERC_DEPTH_STREAM == (propIdx & CV_CAP_INTELPERC_STREAMS_MASK))
+        {
+            isSet = m_depthStream.setProperty(purePropIdx, propVal);
+        }
+        return isSet;
+    }
+
+    // Grabs from the depth stream, and additionally from the image stream
+    // once it has a profile selected; true only if all attempted grabs work.
+    bool grabFrame()
+    {
+        if (!isOpened())
+            return false;
+
+        bool isGrabbed = false;
+        if (m_depthStream.isValid())
+            isGrabbed = m_depthStream.grabFrame();
+        if ((m_imageStream.isValid()) && (-1 != m_imageStream.getProfileIDX()))
+            isGrabbed &= m_imageStream.grabFrame();
+
+        return isGrabbed;
+    }
+
+    // outputType selects the channel (see CV_CAP_INTELPERC_* enum).
+    // NOTE(review): CV_Assert aborts in debug builds when the requested
+    // channel was never grabbed or the type is unknown -- confirm this is
+    // the intended failure mode rather than returning NULL.
+    virtual IplImage* retrieveFrame(int outputType)
+    {
+        IplImage* image = 0;
+        switch (outputType)
+        {
+        case CV_CAP_INTELPERC_DEPTH_MAP:
+            image = m_depthStream.retrieveDepthFrame();
+            break;
+        case CV_CAP_INTELPERC_UVDEPTH_MAP:
+            image = m_depthStream.retrieveUVFrame();
+            break;
+        case CV_CAP_INTELPERC_IR_MAP:
+            image = m_depthStream.retrieveIRFrame();
+            break;
+        case CV_CAP_INTELPERC_IMAGE:
+            image = m_imageStream.retrieveFrame();
+            break;
+        }
+        CV_Assert(NULL != image);
+        return image;
+    }
+
+    bool isOpened() const
+    {
+        return m_contextOpened;
+    }
+protected:
+    bool m_contextOpened; // session created and both streams initialized
+
+    PXCSmartPtr<PXCSession> m_session;
+    CvIntelPerCStreamImage m_imageStream;
+    CvIntelPerCStreamDepth m_depthStream;
+};
+
+
+// Factory for the Intel PerC capture backend: returns an opened capture, or
+// 0 when the SDK session/streams could not be initialized.
+CvCapture* cvCreateCameraCapture_IntelPerC(int index)
+{
+    CvCapture_IntelPerC* pCapture = new CvCapture_IntelPerC(index);
+    if (!pCapture->isOpened())
+    {
+        delete pCapture;
+        pCapture = 0;
+    }
+    return pCapture;
+}
+
+
+#endif //HAVE_INTELPERC
CvCapture* cvCreateCameraCapture_Android( int index );
CvCapture* cvCreateCameraCapture_XIMEA( int index );
CvCapture* cvCreateCameraCapture_AVFoundation(int index);
+CvCapture* cvCreateCameraCapture_IntelPerC(int index);
CVAPI(int) cvHaveImageReader(const char* filename);
defined(HAVE_XIMEA) || \
defined(HAVE_AVFOUNDATION) || \
defined(HAVE_GIGE_API) || \
+ defined(HAVE_INTELPERC) || \
(0)
//defined(HAVE_ANDROID_NATIVE_CAMERA) || - enable after #1193
# define BUILD_WITH_CAMERA_SUPPORT 1
--- /dev/null
+// testOpenCVCam.cpp : Defines the entry point for the console application.
+//
+
+#include <tchar.h>
+#include "opencv2/highgui/highgui.hpp"
+//#include "opencv2/imgproc/imgproc.hpp"
+
+#include <iostream>
+
+using namespace cv;
+using namespace std;
+
+// Command-line options, filled in by parseCMDLine():
+static bool g_printStreamSetting = false;    // -ps: dump stream properties/profiles
+static int g_imageStreamProfileIdx = -1;     // -isp IDX: image stream profile (-1 = not set)
+static int g_depthStreamProfileIdx = -1;     // -dsp IDX: depth stream profile (-1 = not set)
+static bool g_irStreamShow = false;          // -ir: display IR stream
+static double g_imageBrightness = -DBL_MAX;  // -imb VAL (-DBL_MAX = not set)
+static double g_imageContrast = -DBL_MAX;    // -imc VAL (-DBL_MAX = not set)
+static bool g_printTiming = false;           // -pts: print frame index and time
+static bool g_showClosedPoint = false;       // --show-closed: mark closest depth point
+
+
+// [row, col] of the minimum depth value (closest point) in the last depth
+// frame, as filled by minMaxIdx() in the main loop.
+static int g_closedDepthPoint[2];
+
+// Prints command-line usage help to stdout.
+// arg0 is argv[0]; the directory prefix (everything up to the last '\\' or
+// '/') is stripped so only the executable name is shown.
+static void printUsage(char *arg0)
+{
+    char *filename = arg0;
+    while (*filename)
+        filename++;
+    // BUGFIX: was ('//' != *filename) -- a multi-character literal of type
+    // int that can never equal a char, so '/'-separated paths were never
+    // stripped. Compare against '/' instead.
+    while ((arg0 <= filename) && ('\\' != *filename) && ('/' != *filename))
+        filename--;
+    filename++;
+
+    cout << "This program demonstrates usage of camera supported\nby Intel Perceptual computing SDK." << endl << endl;
+    // BUGFIX: added missing space between the program name and its options.
+    cout << "usage: " << filename << " [-ps] [-isp IDX] [-dsp IDX]\n [-ir] [-imb VAL] [-imc VAL]" << endl << endl;
+    cout << " -ps, print streams setting and profiles" << endl;
+    cout << " -isp IDX, set profile index of the image stream" << endl;
+    cout << " -dsp IDX, set profile index of the depth stream" << endl;
+    cout << " -ir, show data from IR stream" << endl;
+    cout << " -imb VAL, set brightness value for a image stream" << endl; // BUGFIX: "brighness" typo
+    cout << " -imc VAL, set contrast value for a image stream" << endl;
+    cout << " -pts, print frame index and frame time" << endl;
+    // BUGFIX: description was copy-pasted from -pts; --show-closed marks the
+    // closest depth point, it does not print timing.
+    cout << " --show-closed, mark the closest point to the camera" << endl;
+    cout << endl;
+}
+
+// Parses command-line arguments into the g_* option globals.
+// With no arguments, prints usage and continues with defaults.
+// Unknown options, missing option values, or inconsistent combinations
+// terminate the program with exit(-1); --help exits with 0.
+static void parseCMDLine(int argc, char* argv[])
+{
+    if( argc == 1 )
+    {
+        printUsage(argv[0]);
+    }
+    else
+    {
+        for( int i = 1; i < argc; i++ )
+        {
+            if ((0 == strcmp(argv[i], "--help")) || (0 == strcmp( argv[i], "-h")))
+            {
+                printUsage(argv[0]);
+                exit(0);
+            }
+            else if ((0 == strcmp( argv[i], "--print-streams")) || (0 == strcmp( argv[i], "-ps")))
+            {
+                g_printStreamSetting = true;
+            }
+            else if ((0 == strcmp( argv[i], "--image-stream-prof")) || (0 == strcmp( argv[i], "-isp")))
+            {
+                // ROBUSTNESS: these options take a value; previously a
+                // trailing option read past the end of argv.
+                if (argc <= i + 1) { cerr << "Missing value for option " << argv[i] << endl; exit(-1); }
+                g_imageStreamProfileIdx = atoi(argv[++i]);
+            }
+            else if ((0 == strcmp( argv[i], "--depth-stream-prof")) || (0 == strcmp( argv[i], "-dsp")))
+            {
+                if (argc <= i + 1) { cerr << "Missing value for option " << argv[i] << endl; exit(-1); }
+                g_depthStreamProfileIdx = atoi(argv[++i]);
+            }
+            else if (0 == strcmp( argv[i], "-ir"))
+            {
+                g_irStreamShow = true;
+            }
+            else if (0 == strcmp( argv[i], "-imb"))
+            {
+                if (argc <= i + 1) { cerr << "Missing value for option " << argv[i] << endl; exit(-1); }
+                g_imageBrightness = atof(argv[++i]);
+            }
+            else if (0 == strcmp( argv[i], "-imc"))
+            {
+                if (argc <= i + 1) { cerr << "Missing value for option " << argv[i] << endl; exit(-1); }
+                g_imageContrast = atof(argv[++i]);
+            }
+            else if (0 == strcmp(argv[i], "-pts"))
+            {
+                g_printTiming = true;
+            }
+            else if (0 == strcmp(argv[i], "--show-closed"))
+            {
+                g_showClosedPoint = true;
+            }
+            else
+            {
+                cout << "Unsupported command line argument: " << argv[i] << "." << endl;
+                exit(-1);
+            }
+        }
+        // BUGFIX: was `if (g_closedDepthPoint && ...)` -- g_closedDepthPoint
+        // is an array that decays to a non-null pointer, so the condition was
+        // always true and the error fired for every run without a depth
+        // profile, even when --show-closed was not requested.
+        if (g_showClosedPoint && (-1 == g_depthStreamProfileIdx))
+        {
+            cerr << "For --show-closed depth profile has to be selected" << endl;
+            exit(-1);
+        }
+    }
+}
+
+// Dumps the image- and depth-stream properties and all available stream
+// profiles (width, height, fps) to stdout.
+// NOTE(review): enumerating profiles works by SETTING the profile index on
+// the capture, so the streams are left on the last profile afterwards --
+// confirm callers reset the profile before grabbing.
+static void printStreamProperties(VideoCapture &capture)
+{
+    size_t profilesCount = (size_t)capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_COUNT);
+    cout << "Image stream." << endl;
+    cout << " Brightness = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_BRIGHTNESS) << endl;
+    cout << " Contrast = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_CONTRAST) << endl;
+    cout << " Saturation = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_SATURATION) << endl;
+    cout << " Hue = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_HUE) << endl;
+    cout << " Gamma = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_GAMMA) << endl;
+    cout << " Sharpness = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_SHARPNESS) << endl;
+    cout << " Gain = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_GAIN) << endl;
+    // BUGFIX: label typo "Backligh" -> "Backlight".
+    cout << " Backlight = " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_BACKLIGHT) << endl;
+    cout << "Image streams profiles:" << endl;
+    for (size_t i = 0; i < profilesCount; i++)
+    {
+        capture.set(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)i);
+        cout << " Profile[" << i << "]: ";
+        cout << "width = " <<
+            (int)capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_FRAME_WIDTH);
+        cout << ", height = " <<
+            (int)capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_FRAME_HEIGHT);
+        cout << ", fps = " <<
+            capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_FPS);
+        cout << endl;
+    }
+
+    profilesCount = (size_t)capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_COUNT);
+    cout << "Depth stream." << endl;
+    cout << " Low confidence value = " << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE) << endl;
+    cout << " Saturation value = " << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE) << endl;
+    cout << " Confidence threshold = " << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_CONFIDENCE_THRESHOLD) << endl;
+    cout << " Focal length = (" << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_HORZ) << ", "
+        << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_FOCAL_LENGTH_VERT) << ")" << endl;
+    cout << "Depth streams profiles:" << endl;
+    for (size_t i = 0; i < profilesCount; i++)
+    {
+        capture.set(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)i);
+        cout << " Profile[" << i << "]: ";
+        cout << "width = " <<
+            (int)capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_FRAME_WIDTH);
+        cout << ", height = " <<
+            (int)capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_FRAME_HEIGHT);
+        cout << ", fps = " <<
+            capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_FPS);
+        cout << endl;
+    }
+}
+
+// Shows the color image; when --show-closed is active, marks the pixel
+// corresponding to the closest depth point (g_closedDepthPoint) in red,
+// using the UV (depth-to-color) map to translate depth coordinates into
+// image coordinates.
+static void imshowImage(const char *winname, Mat &image, VideoCapture &capture)
+{
+    // BUGFIX: was `if (g_closedDepthPoint)` -- an array decays to a non-null
+    // pointer, so the branch always executed; test the user flag instead.
+    if (g_showClosedPoint)
+    {
+        Mat uvMap;
+        if (capture.retrieve(uvMap, CV_CAP_INTELPERC_UVDEPTH_MAP))
+        {
+            // g_closedDepthPoint is [row, col] as produced by minMaxIdx;
+            // each UV-map element is a (u, v) float pair.
+            float *uvmap = (float *)uvMap.ptr() + 2 * (g_closedDepthPoint[0] * uvMap.cols + g_closedDepthPoint[1]);
+            int x = (int)((*uvmap) * image.cols); uvmap++;
+            int y = (int)((*uvmap) * image.rows);
+
+            if ((0 <= x) && (0 <= y)) // negative coordinates mean no valid mapping
+            {
+                static const int pointSize = 4;
+                // Paint a pointSize x pointSize red square, clamped to the image.
+                for (int row = y; row < min(y + pointSize, image.rows); row++)
+                {
+                    uchar* ptrDst = image.ptr(row) + x * 3 + 2;//+2 -> Red
+                    for (int col = 0; col < min(pointSize, image.cols - x); col++, ptrDst+=3)
+                    {
+                        *ptrDst = 255;
+                    }
+                }
+            }
+        }
+    }
+    imshow(winname, image);
+}
+// Visualizes a 16-bit IR frame: every sample is shifted right by 2 to fit
+// into 8 bits (assumes 10-bit IR data -- TODO confirm against the SDK).
+// When --show-closed is active the frame is expanded to BGR and the closest
+// depth point (g_closedDepthPoint = [row, col]) is marked with a red square;
+// otherwise a single-channel grayscale image is shown.
+static void imshowIR(const char *winname, Mat &ir)
+{
+    Mat image;
+    if (g_showClosedPoint)
+    {
+        // Gray value replicated into all three BGR channels so the red
+        // marker below stands out.
+        image.create(ir.rows, ir.cols, CV_8UC3);
+        for (int row = 0; row < ir.rows; row++)
+        {
+            uchar* ptrDst = image.ptr(row);
+            short* ptrSrc = (short*)ir.ptr(row);
+            for (int col = 0; col < ir.cols; col++, ptrSrc++)
+            {
+                uchar val = (uchar) ((*ptrSrc) >> 2);
+                *ptrDst = val; ptrDst++;
+                *ptrDst = val; ptrDst++;
+                *ptrDst = val; ptrDst++;
+            }
+        }
+
+        // Paint a pointSize x pointSize red square at the closest point,
+        // clamped to the image bounds.
+        static const int pointSize = 4;
+        for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++)
+        {
+            uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
+            for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3)
+            {
+                *ptrDst = 255;
+            }
+        }
+    }
+    else
+    {
+        image.create(ir.rows, ir.cols, CV_8UC1);
+        for (int row = 0; row < ir.rows; row++)
+        {
+            uchar* ptrDst = image.ptr(row);
+            short* ptrSrc = (short*)ir.ptr(row);
+            for (int col = 0; col < ir.cols; col++, ptrSrc++, ptrDst++)
+            {
+                *ptrDst = (uchar) ((*ptrSrc) >> 2);
+            }
+        }
+    }
+
+    imshow(winname, image);
+}
+// Visualizes a 16-bit depth frame. Pixels equal to the stream's
+// low-confidence or saturation sentinel values are rendered black; all
+// other samples are shifted right by 2 to fit into 8 bits.
+// When --show-closed is active the frame is expanded to BGR and the closest
+// depth point (g_closedDepthPoint = [row, col]) is marked with a red square.
+static void imshowDepth(const char *winname, Mat &depth, VideoCapture &capture)
+{
+    // Sentinel values reported by the depth stream for unreliable pixels.
+    short lowValue = (short)capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_LOW_CONFIDENCE_VALUE);
+    short saturationValue = (short)capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_DEPTH_SATURATION_VALUE);
+
+    Mat image;
+    if (g_showClosedPoint)
+    {
+        // Gray value replicated into all three BGR channels so the red
+        // marker below stands out.
+        image.create(depth.rows, depth.cols, CV_8UC3);
+        for (int row = 0; row < depth.rows; row++)
+        {
+            uchar* ptrDst = image.ptr(row);
+            short* ptrSrc = (short*)depth.ptr(row);
+            for (int col = 0; col < depth.cols; col++, ptrSrc++)
+            {
+                if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc)))
+                {
+                    *ptrDst = 0; ptrDst++;
+                    *ptrDst = 0; ptrDst++;
+                    *ptrDst = 0; ptrDst++;
+                }
+                else
+                {
+                    uchar val = (uchar) ((*ptrSrc) >> 2);
+                    *ptrDst = val; ptrDst++;
+                    *ptrDst = val; ptrDst++;
+                    *ptrDst = val; ptrDst++;
+                }
+            }
+        }
+
+        // Paint a pointSize x pointSize red square at the closest point,
+        // clamped to the image bounds.
+        static const int pointSize = 4;
+        for (int row = g_closedDepthPoint[0]; row < min(g_closedDepthPoint[0] + pointSize, image.rows); row++)
+        {
+            uchar* ptrDst = image.ptr(row) + g_closedDepthPoint[1] * 3 + 2;//+2 -> Red
+            for (int col = 0; col < min(pointSize, image.cols - g_closedDepthPoint[1]); col++, ptrDst+=3)
+            {
+                *ptrDst = 255;
+            }
+        }
+    }
+    else
+    {
+        image.create(depth.rows, depth.cols, CV_8UC1);
+        for (int row = 0; row < depth.rows; row++)
+        {
+            uchar* ptrDst = image.ptr(row);
+            short* ptrSrc = (short*)depth.ptr(row);
+            for (int col = 0; col < depth.cols; col++, ptrSrc++, ptrDst++)
+            {
+                if ((lowValue == (*ptrSrc)) || (saturationValue == (*ptrSrc)))
+                    *ptrDst = 0;
+                else
+                    *ptrDst = (uchar) ((*ptrSrc) >> 2);
+            }
+        }
+    }
+    imshow(winname, image);
+}
+
+// Entry point: opens the Intel PerC capture, applies the command-line
+// stream/profile settings, then loops grabbing and displaying the selected
+// depth / IR / color streams until a key is pressed.
+int _tmain(int argc, char* argv[])
+{
+    parseCMDLine(argc, argv);
+
+    VideoCapture capture;
+    capture.open(CV_CAP_INTELPERC);
+    if (!capture.isOpened())
+    {
+        cerr << "Can not open a capture object." << endl;
+        return -1;
+    }
+
+    if (g_printStreamSetting)
+        printStreamProperties(capture);
+
+    if (-1 != g_imageStreamProfileIdx)
+    {
+        if (!capture.set(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_imageStreamProfileIdx))
+        {
+            cerr << "Can not setup a image stream." << endl;
+            return -1;
+        }
+    }
+    if (-1 != g_depthStreamProfileIdx)
+    {
+        if (!capture.set(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_IDX, (double)g_depthStreamProfileIdx))
+        {
+            cerr << "Can not setup a depth stream." << endl;
+            return -1;
+        }
+    }
+    else if (g_irStreamShow)
+    {
+        // IR comes through the depth stream; profile 0 is used by default.
+        if (!capture.set(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_INTELPERC_PROFILE_IDX, 0.0))
+        {
+            cerr << "Can not setup a IR stream." << endl;
+            return -1;
+        }
+    }
+    else
+    {
+        // NOTE(review): reached even when only an image profile was chosen;
+        // a depth or IR selection is required to proceed -- confirm intended.
+        cout << "Streams not selected" << endl;
+        return 0;
+    }
+
+    //Setup additional properies only after set profile of the stream
+    if ( (-10000.0 < g_imageBrightness) && (g_imageBrightness < 10000.0))
+        capture.set(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_BRIGHTNESS, g_imageBrightness);
+    // BUGFIX: the contrast value was written to CV_CAP_PROP_BRIGHTNESS,
+    // clobbering the brightness setting; use CV_CAP_PROP_CONTRAST.
+    if ( (0 < g_imageContrast) && (g_imageContrast < 10000.0))
+        capture.set(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_CONTRAST, g_imageContrast);
+
+    int frame = 0;
+    for(;;frame++)
+    {
+        Mat bgrImage;
+        Mat depthImage;
+        Mat irImage;
+
+        if (!capture.grab())
+        {
+            cout << "Can not grab images." << endl;
+            return -1;
+        }
+
+        if ((-1 != g_depthStreamProfileIdx) && (capture.retrieve(depthImage, CV_CAP_INTELPERC_DEPTH_MAP)))
+        {
+            // BUGFIX: was `if (g_closedDepthPoint)` -- the array decays to a
+            // non-null pointer, so minMaxIdx ran unconditionally; gate on the
+            // --show-closed flag instead.
+            if (g_showClosedPoint)
+            {
+                double minVal = 0.0; double maxVal = 0.0;
+                // minMaxIdx fills g_closedDepthPoint with the [row, col] of
+                // the minimum depth value, i.e. the closest point.
+                minMaxIdx(depthImage, &minVal, &maxVal, g_closedDepthPoint);
+            }
+            imshowDepth("depth image", depthImage, capture);
+        }
+        if ((g_irStreamShow) && (capture.retrieve(irImage, CV_CAP_INTELPERC_IR_MAP)))
+            imshowIR("ir image", irImage);
+        if ((-1 != g_imageStreamProfileIdx) && (capture.retrieve(bgrImage, CV_CAP_INTELPERC_IMAGE)))
+            imshowImage("color image", bgrImage, capture);
+
+        if (g_printTiming)
+        {
+            cout << "Image frame: " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_POS_FRAMES)
+                 << ", Depth(IR) frame: " << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_POS_FRAMES) << endl;
+            cout << "Image frame: " << capture.get(CV_CAP_INTELPERC_IMAGE_STREAM | CV_CAP_PROP_POS_MSEC)
+                 << ", Depth(IR) frame: " << capture.get(CV_CAP_INTELPERC_DEPTH_STREAM | CV_CAP_PROP_POS_MSEC) << endl;
+        }
+        if( waitKey(30) >= 0 )
+            break;
+    }
+
+    return 0;
+}
+