/*
-// Sample demonstrating interoperability of OpenCV UMat with Direct X surface
-// At first, the data obtained from video file or camera and
-// placed onto Direct X surface,
-// following mapping of this Direct X surface to OpenCV UMat and call cv::Blur
-// function. The result is mapped back to Direct X surface and rendered through
-// Direct X API.
+// A sample program demonstrating interoperability of OpenCV cv::UMat with a Direct X surface.
+// First, the data obtained from a video file or camera is placed onto a Direct X surface;
+// this Direct X surface is then mapped to an OpenCV cv::UMat and cv::blur is called on it.
+// The result is mapped back to the Direct X surface and rendered through the Direct X API.
*/
+
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <d3d10.h>
#pragma comment (lib, "d3d10.lib")
-
-using namespace std;
-using namespace cv;
-
class D3D10WinApp : public D3DSample
{
public:
&m_pD3D10Dev);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D10SwapChain->GetBuffer(0, __uuidof(ID3D10Texture2D), (LPVOID*)&m_pBackBuffer);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D10Dev->CreateRenderTargetView(m_pBackBuffer, NULL, &m_pRenderTarget);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
m_pD3D10Dev->OMSetRenderTargets(1, &m_pRenderTarget, NULL);
if (FAILED(r))
{
std::cerr << "Can't create texture with input image" << std::endl;
- return -1;
+ return EXIT_FAILURE;
}
// initialize OpenCL context of OpenCV lib from DirectX
cv::ocl::Context::getDefault().device(0).name() :
"No OpenCL device";
- return 0;
+ return EXIT_SUCCESS;
} // create()
HRESULT r;
if (!m_cap.read(m_frame_bgr))
- return -1;
+ return EXIT_FAILURE;
- cv::cvtColor(m_frame_bgr, m_frame_rgba, COLOR_BGR2RGBA);
+ cv::cvtColor(m_frame_bgr, m_frame_rgba, cv::COLOR_BGR2RGBA);
UINT subResource = ::D3D10CalcSubresource(0, 0, 1);
*ppSurface = m_pSurface;
- return 0;
+ return EXIT_SUCCESS;
} // get_surface()
try
{
if (m_shutdown)
- return 0;
+ return EXIT_SUCCESS;
// capture user input once
MODE mode = (m_mode == MODE_GPU_NV12) ? MODE_GPU_RGBA : m_mode;
r = get_surface(&pSurface);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
+ m_timer.reset();
m_timer.start();
switch (mode)
if (m_demo_processing)
{
// blur D3D10 surface with OpenCV on CPU
- cv::blur(m, m, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(m, m, cv::Size(15, 15));
}
+ m_timer.stop();
+
cv::String strMode = cv::format("mode: %s", m_modeStr[MODE_CPU].c_str());
cv::String strProcessing = m_demo_processing ? "blur frame" : "copy frame";
- cv::String strTime = cv::format("time: %4.1f msec", m_timer.time(Timer::UNITS::MSEC));
+ cv::String strTime = cv::format("time: %4.3f msec", m_timer.getTimeMilli());
cv::String strDevName = cv::format("OpenCL device: %s", m_oclDevName.c_str());
- cv::putText(m, strMode, cv::Point(0, 16), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strProcessing, cv::Point(0, 32), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strTime, cv::Point(0, 48), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strDevName, cv::Point(0, 64), 1, 0.8, cv::Scalar(0, 0, 0));
+ cv::putText(m, strMode, cv::Point(0, 20), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strProcessing, cv::Point(0, 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strTime, cv::Point(0, 60), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strDevName, cv::Point(0, 80), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
pSurface->Unmap(subResource);
if (m_demo_processing)
{
// blur D3D10 surface with OpenCV on GPU with OpenCL
- cv::blur(u, u, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(u, u, cv::Size(15, 15));
}
+ m_timer.stop();
+
cv::String strMode = cv::format("mode: %s", m_modeStr[MODE_GPU_RGBA].c_str());
cv::String strProcessing = m_demo_processing ? "blur frame" : "copy frame";
- cv::String strTime = cv::format("time: %4.1f msec", m_timer.time(Timer::UNITS::MSEC));
+ cv::String strTime = cv::format("time: %4.3f msec", m_timer.getTimeMilli());
cv::String strDevName = cv::format("OpenCL device: %s", m_oclDevName.c_str());
- cv::putText(u, strMode, cv::Point(0, 16), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strProcessing, cv::Point(0, 32), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strTime, cv::Point(0, 48), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strDevName, cv::Point(0, 64), 1, 0.8, cv::Scalar(0, 0, 0));
+ cv::putText(u, strMode, cv::Point(0, 20), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strProcessing, cv::Point(0, 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strTime, cv::Point(0, 60), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strDevName, cv::Point(0, 80), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
cv::directx::convertToD3D10Texture2D(u, pSurface);
} // switch
- m_timer.stop();
-
// traditional DX render pipeline:
// BitBlt surface to backBuffer and flip backBuffer to frontBuffer
m_pD3D10Dev->CopyResource(m_pBackBuffer, pSurface);
r = m_pD3D10SwapChain->Present(0, 0);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
} // try
return 10;
}
- return 0;
+ return EXIT_SUCCESS;
} // render()
SAFE_RELEASE(m_pRenderTarget);
SAFE_RELEASE(m_pD3D10Dev);
D3DSample::cleanup();
- return 0;
+ return EXIT_SUCCESS;
} // cleanup()
private:
/*
-// Sample demonstrating interoperability of OpenCV UMat with Direct X surface
-// At first, the data obtained from video file or camera and
-// placed onto Direct X surface,
-// following mapping of this Direct X surface to OpenCV UMat and call cv::Blur
-// function. The result is mapped back to Direct X surface and rendered through
-// Direct X API.
+// A sample program demonstrating interoperability of OpenCV cv::UMat with a Direct X surface.
+// First, the data obtained from a video file or camera is placed onto a Direct X surface;
+// this Direct X surface is then mapped to an OpenCV cv::UMat and cv::blur is called on it.
+// The result is mapped back to the Direct X surface and rendered through the Direct X API.
*/
+
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <d3d11.h>
#pragma comment (lib, "d3d11.lib")
-
-using namespace std;
-using namespace cv;
-
class D3D11WinApp : public D3DSample
{
public:
cv::ocl::Context::getDefault().device(0).name() :
"No OpenCL device";
- return 0;
+ return EXIT_SUCCESS;
} // create()
HRESULT r;
if (!m_cap.read(m_frame_bgr))
- return -1;
+ return EXIT_FAILURE;
if (use_nv12)
{
- cv::cvtColor(m_frame_bgr, m_frame_i420, COLOR_BGR2YUV_I420);
+ cv::cvtColor(m_frame_bgr, m_frame_i420, cv::COLOR_BGR2YUV_I420);
convert_I420_to_NV12(m_frame_i420, m_frame_nv12, m_width, m_height);
}
else
{
- cv::cvtColor(m_frame_bgr, m_frame_rgba, COLOR_BGR2RGBA);
+ cv::cvtColor(m_frame_bgr, m_frame_rgba, cv::COLOR_BGR2RGBA);
// process video frame on CPU
UINT subResource = ::D3D11CalcSubresource(0, 0, 1);
*ppSurface = use_nv12 ? m_pSurfaceNV12 : m_pSurfaceRGBA;
- return 0;
+ return EXIT_SUCCESS;
} // get_surface()
try
{
if (m_shutdown)
- return 0;
+ return EXIT_SUCCESS;
// capture user input once
MODE mode = (m_mode == MODE_GPU_NV12 && !m_nv12_available) ? MODE_GPU_RGBA : m_mode;
throw std::runtime_error("get_surface() failed!");
}
+ m_timer.reset();
m_timer.start();
switch (mode)
if (m_demo_processing)
{
// blur data from D3D11 surface with OpenCV on CPU
- cv::blur(m, m, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(m, m, cv::Size(15, 15));
}
+ m_timer.stop();
+
cv::String strMode = cv::format("mode: %s", m_modeStr[MODE_CPU].c_str());
cv::String strProcessing = m_demo_processing ? "blur frame" : "copy frame";
- cv::String strTime = cv::format("time: %4.1f msec", m_timer.time(Timer::UNITS::MSEC));
+ cv::String strTime = cv::format("time: %4.3f msec", m_timer.getTimeMilli());
cv::String strDevName = cv::format("OpenCL device: %s", m_oclDevName.c_str());
- cv::putText(m, strMode, cv::Point(0, 16), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strProcessing, cv::Point(0, 32), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strTime, cv::Point(0, 48), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(m, strDevName, cv::Point(0, 64), 1, 0.8, cv::Scalar(0, 0, 0));
+ cv::putText(m, strMode, cv::Point(0, 20), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strProcessing, cv::Point(0, 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strTime, cv::Point(0, 60), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(m, strDevName, cv::Point(0, 80), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
m_pD3D11Ctx->Unmap(pSurface, subResource);
if (m_demo_processing)
{
// blur data from D3D11 surface with OpenCV on GPU with OpenCL
- cv::blur(u, u, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(u, u, cv::Size(15, 15));
}
+ m_timer.stop();
+
cv::String strMode = cv::format("mode: %s", m_modeStr[mode].c_str());
cv::String strProcessing = m_demo_processing ? "blur frame" : "copy frame";
- cv::String strTime = cv::format("time: %4.1f msec", m_timer.time(Timer::UNITS::MSEC));
+ cv::String strTime = cv::format("time: %4.3f msec", m_timer.getTimeMilli());
cv::String strDevName = cv::format("OpenCL device: %s", m_oclDevName.c_str());
- cv::putText(u, strMode, cv::Point(0, 16), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strProcessing, cv::Point(0, 32), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strTime, cv::Point(0, 48), 1, 0.8, cv::Scalar(0, 0, 0));
- cv::putText(u, strDevName, cv::Point(0, 64), 1, 0.8, cv::Scalar(0, 0, 0));
+ cv::putText(u, strMode, cv::Point(0, 20), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strProcessing, cv::Point(0, 40), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strTime, cv::Point(0, 60), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
+ cv::putText(u, strDevName, cv::Point(0, 80), cv::FONT_HERSHEY_SIMPLEX, 0.8, cv::Scalar(0, 0, 200), 2);
cv::directx::convertToD3D11Texture2D(u, pSurface);
}
cv::Mat frame_nv12(m_height + (m_height / 2), m_width, CV_8UC1, mappedTex.pData, mappedTex.RowPitch);
- cv::cvtColor(frame_nv12, m_frame_rgba, COLOR_YUV2RGBA_NV12);
+ cv::cvtColor(frame_nv12, m_frame_rgba, cv::COLOR_YUV2RGBA_NV12);
m_pD3D11Ctx->Unmap(m_pSurfaceNV12_cpu_copy, subResource);
}
} // switch
- m_timer.stop();
-
// traditional DX render pipeline:
// BitBlt surface to backBuffer and flip backBuffer to frontBuffer
m_pD3D11Ctx->CopyResource(m_pBackBuffer, pSurface);
return 11;
}
- return 0;
+ return EXIT_SUCCESS;
} // render()
SAFE_RELEASE(m_pD3D11Dev);
SAFE_RELEASE(m_pD3D11Ctx);
D3DSample::cleanup();
- return 0;
+ return EXIT_SUCCESS;
} // cleanup()
protected:
/*
-// Sample demonstrating interoperability of OpenCV UMat with Direct X surface
-// At first, the data obtained from video file or camera and
-// placed onto Direct X surface,
-// following mapping of this Direct X surface to OpenCV UMat and call cv::Blur
-// function. The result is mapped back to Direct X surface and rendered through
-// Direct X API.
+// A sample program demonstrating interoperability of OpenCV cv::UMat with a Direct X surface.
+// First, the data obtained from a video file or camera is placed onto a Direct X surface;
+// this Direct X surface is then mapped to an OpenCV cv::UMat and cv::blur is called on it.
+// The result is mapped back to the Direct X surface and rendered through the Direct X API.
*/
+
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <d3d9.h>
#pragma comment (lib, "d3d9.lib")
-using namespace std;
-using namespace cv;
-
class D3D9WinApp : public D3DSample
{
public:
m_pD3D9 = ::Direct3DCreate9(D3D_SDK_VERSION);
if (NULL == m_pD3D9)
{
- return -1;
+ return EXIT_FAILURE;
}
DWORD flags = D3DCREATE_HARDWARE_VERTEXPROCESSING |
r = m_pD3D9->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, m_hWnd, flags, &d3dpp, &m_pD3D9Dev);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D9Dev->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &m_pBackBuffer);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D9Dev->CreateOffscreenPlainSurface(m_width, m_height, D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, &m_pSurface, NULL);
if (FAILED(r))
{
std::cerr << "Can't create surface for result" << std::endl;
- return -1;
+ return EXIT_FAILURE;
}
// initialize OpenCL context of OpenCV lib from DirectX
cv::ocl::Context::getDefault().device(0).name() :
"No OpenCL device";
- return 0;
+ return EXIT_SUCCESS;
} // create()
HRESULT r;
if (!m_cap.read(m_frame_bgr))
- return -1;
+ return EXIT_FAILURE;
- cv::cvtColor(m_frame_bgr, m_frame_rgba, COLOR_BGR2BGRA);
+ cv::cvtColor(m_frame_bgr, m_frame_rgba, cv::COLOR_BGR2BGRA);
D3DLOCKED_RECT memDesc = { 0, NULL };
RECT rc = { 0, 0, m_width, m_height };
*ppSurface = m_pSurface;
- return 0;
+ return EXIT_SUCCESS;
} // get_surface()
try
{
if (m_shutdown)
- return 0;
+ return EXIT_SUCCESS;
// capture user input once
MODE mode = (m_mode == MODE_GPU_NV12) ? MODE_GPU_RGBA : m_mode;
r = get_surface(&pSurface);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
+ m_timer.reset();
m_timer.start();
switch (mode)
r = pSurface->LockRect(&memDesc, &rc, 0);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
cv::Mat m(m_height, m_width, CV_8UC4, memDesc.pBits, memDesc.Pitch);
if (m_demo_processing)
{
// blur D3D9 surface with OpenCV on CPU
- cv::blur(m, m, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(m, m, cv::Size(15, 15));
}
r = pSurface->UnlockRect();
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
break;
if (m_demo_processing)
{
// blur D3D9 surface with OpenCV on GPU with OpenCL
- cv::blur(u, u, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(u, u, cv::Size(15, 15));
}
cv::directx::convertToDirect3DSurface9(u, pSurface);
m_timer.stop();
- print_info(pSurface, mode, m_timer.time(Timer::UNITS::MSEC), m_oclDevName);
+ print_info(pSurface, mode, m_timer.getTimeMilli(), m_oclDevName);
// traditional DX render pipeline:
// BitBlt surface to backBuffer and flip backBuffer to frontBuffer
r = m_pD3D9Dev->StretchRect(pSurface, NULL, m_pBackBuffer, NULL, D3DTEXF_NONE);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
// present the back buffer contents to the display
r = m_pD3D9Dev->Present(NULL, NULL, NULL, NULL);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
} // try
return 10;
}
- return 0;
+ return EXIT_SUCCESS;
} // render()
- void print_info(LPDIRECT3DSURFACE9 pSurface, int mode, float time, cv::String oclDevName)
+ void print_info(LPDIRECT3DSURFACE9 pSurface, int mode, double time, cv::String oclDevName)
{
HDC hDC;
SAFE_RELEASE(m_pD3D9Dev);
SAFE_RELEASE(m_pD3D9);
D3DSample::cleanup();
- return 0;
+ return EXIT_SUCCESS;
} // cleanup()
private:
/*
-// Sample demonstrating interoperability of OpenCV UMat with Direct X surface
-// At first, the data obtained from video file or camera and
-// placed onto Direct X surface,
-// following mapping of this Direct X surface to OpenCV UMat and call cv::Blur
-// function. The result is mapped back to Direct X surface and rendered through
-// Direct X API.
+// A sample program demonstrating interoperability of OpenCV cv::UMat with a Direct X surface.
+// First, the data obtained from a video file or camera is placed onto a Direct X surface;
+// this Direct X surface is then mapped to an OpenCV cv::UMat and cv::blur is called on it.
+// The result is mapped back to the Direct X surface and rendered through the Direct X API.
*/
+
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include <d3d9.h>
#pragma comment (lib, "d3d9.lib")
-using namespace std;
-using namespace cv;
-
class D3D9ExWinApp : public D3DSample
{
public:
r = ::Direct3DCreate9Ex(D3D_SDK_VERSION, &m_pD3D9Ex);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
DWORD flags = D3DCREATE_HARDWARE_VERTEXPROCESSING |
r = m_pD3D9Ex->CreateDeviceEx(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, m_hWnd, flags, &d3dpp, NULL, &m_pD3D9DevEx);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D9DevEx->GetBackBuffer(0, 0, D3DBACKBUFFER_TYPE_MONO, &m_pBackBuffer);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
r = m_pD3D9DevEx->CreateOffscreenPlainSurface(m_width, m_height, D3DFMT_A8R8G8B8, D3DPOOL_DEFAULT, &m_pSurface, NULL);
if (FAILED(r))
{
std::cerr << "Can't create surface for result" << std::endl;
- return -1;
+ return EXIT_FAILURE;
}
// initialize OpenCL context of OpenCV lib from DirectX
cv::ocl::Context::getDefault().device(0).name() :
"No OpenCL device";
- return 0;
+ return EXIT_SUCCESS;
} // create()
HRESULT r;
if (!m_cap.read(m_frame_bgr))
- return -1;
+ return EXIT_FAILURE;
- cv::cvtColor(m_frame_bgr, m_frame_rgba, COLOR_BGR2BGRA);
+ cv::cvtColor(m_frame_bgr, m_frame_rgba, cv::COLOR_BGR2BGRA);
D3DLOCKED_RECT memDesc = { 0, NULL };
RECT rc = { 0, 0, m_width, m_height };
*ppSurface = m_pSurface;
- return 0;
+ return EXIT_SUCCESS;
} // get_surface()
try
{
if (m_shutdown)
- return 0;
+ return EXIT_SUCCESS;
// capture user input once
MODE mode = m_mode == MODE_GPU_NV12 ? MODE_GPU_RGBA : m_mode;
r = get_surface(&pSurface);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
+ m_timer.reset();
m_timer.start();
switch (mode)
r = pSurface->LockRect(&memDesc, &rc, 0);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
cv::Mat m(m_height, m_width, CV_8UC4, memDesc.pBits, memDesc.Pitch);
if (m_demo_processing)
{
// blur D3D9 surface with OpenCV on CPU
- cv::blur(m, m, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(m, m, cv::Size(15, 15));
}
r = pSurface->UnlockRect();
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
break;
if (m_demo_processing)
{
// blur D3D9 surface with OpenCV on GPU with OpenCL
- cv::blur(u, u, cv::Size(15, 15), cv::Point(-7, -7));
+ cv::blur(u, u, cv::Size(15, 15));
}
cv::directx::convertToDirect3DSurface9(u, pSurface);
m_timer.stop();
- print_info(pSurface, m_mode, m_timer.time(Timer::UNITS::MSEC), m_oclDevName);
+ print_info(pSurface, m_mode, m_timer.getTimeMilli(), m_oclDevName);
// traditional DX render pipeline:
// BitBlt surface to backBuffer and flip backBuffer to frontBuffer
r = m_pD3D9DevEx->StretchRect(pSurface, NULL, m_pBackBuffer, NULL, D3DTEXF_NONE);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
// present the back buffer contents to the display
r = m_pD3D9DevEx->Present(NULL, NULL, NULL, NULL);
if (FAILED(r))
{
- return -1;
+ return EXIT_FAILURE;
}
} // try
return 10;
}
- return 0;
+ return EXIT_SUCCESS;
} // render()
- void print_info(LPDIRECT3DSURFACE9 pSurface, int mode, float time, cv::String oclDevName)
+ void print_info(LPDIRECT3DSURFACE9 pSurface, int mode, double time, cv::String oclDevName)
{
HDC hDC;
SAFE_RELEASE(m_pD3D9DevEx);
SAFE_RELEASE(m_pD3D9Ex);
D3DSample::cleanup();
- return 0;
+ return EXIT_SUCCESS;
} // cleanup()
private:
#define SAFE_RELEASE(p) if (p) { p->Release(); p = NULL; }
-class Timer
-{
-public:
- enum UNITS
- {
- USEC = 0,
- MSEC,
- SEC
- };
-
- Timer() : m_t0(0), m_diff(0)
- {
- m_tick_frequency = (float)cv::getTickFrequency();
-
- m_unit_mul[USEC] = 1000000;
- m_unit_mul[MSEC] = 1000;
- m_unit_mul[SEC] = 1;
- }
-
- void start()
- {
- m_t0 = cv::getTickCount();
- }
-
- void stop()
- {
- m_diff = cv::getTickCount() - m_t0;
- }
-
- float time(UNITS u = UNITS::MSEC)
- {
- float sec = m_diff / m_tick_frequency;
-
- return sec * m_unit_mul[u];
- }
-
-public:
- float m_tick_frequency;
- int64 m_t0;
- int64 m_diff;
- int m_unit_mul[3];
-};
-
-
class D3DSample : public WinApp
{
public:
if (wParam == '1')
{
m_mode = MODE_CPU;
- return 0;
+ return EXIT_SUCCESS;
}
if (wParam == '2')
{
m_mode = MODE_GPU_RGBA;
- return 0;
+ return EXIT_SUCCESS;
}
if (wParam == '3')
{
m_mode = MODE_GPU_NV12;
- return 0;
+ return EXIT_SUCCESS;
}
else if (wParam == VK_SPACE)
{
m_demo_processing = !m_demo_processing;
- return 0;
+ return EXIT_SUCCESS;
}
else if (wParam == VK_ESCAPE)
{
case WM_DESTROY:
::PostQuitMessage(0);
- return 0;
+ return EXIT_SUCCESS;
}
return ::DefWindowProc(hWnd, message, wParam, lParam);
cv::VideoCapture m_cap;
cv::Mat m_frame_bgr;
cv::Mat m_frame_rgba;
- Timer m_timer;
+ cv::TickMeter m_timer;
};
-static void help()
-{
- printf(
- "\nSample demonstrating interoperability of DirectX and OpenCL with OpenCV.\n"
- "Hot keys: \n"
- " SPACE - turn processing on/off\n"
- " 1 - process DX surface through OpenCV on CPU\n"
- " 2 - process DX RGBA surface through OpenCV on GPU (via OpenCL)\n"
- " 3 - process DX NV12 surface through OpenCV on GPU (via OpenCL)\n"
- " ESC - exit\n\n");
-}
-
-
static const char* keys =
{
- "{c camera | true | use camera or not}"
+ "{c camera | 0 | camera id }"
"{f file | | movie file name }"
- "{h help | | print help info }"
};
{
cv::CommandLineParser parser(argc, argv, keys);
std::string file = parser.get<std::string>("file");
- bool useCamera = parser.has("camera");
- bool showHelp = parser.has("help");
+ int camera_id = parser.get<int>("camera");
- if (showHelp)
- help();
+ parser.about(
+ "\nA sample program demonstrating interoperability of DirectX and OpenCL with OpenCV.\n\n"
+ "Hot keys: \n"
+ " SPACE - turn processing on/off\n"
+ " 1 - process DX surface through OpenCV on CPU\n"
+ " 2 - process DX RGBA surface through OpenCV on GPU (via OpenCL)\n"
+ " 3 - process DX NV12 surface through OpenCV on GPU (via OpenCL)\n"
+ " ESC - exit\n\n");
parser.printMessage();
cv::VideoCapture cap;
- if (useCamera)
- cap.open(0);
+ if (file.empty())
+ cap.open(camera_id);
else
cap.open(file.c_str());
if (!cap.isOpened())
{
printf("can not open camera or video file\n");
- return -1;
+ return EXIT_FAILURE;
}
int width = (int)cap.get(cv::CAP_PROP_FRAME_WIDTH);
# pragma comment(lib, "glu32.lib")
#endif
-using namespace cv;
-
-/*
-// Press key to
-// 1 processing on CPU
-// 2 processing on GPU
-// 9 toggle texture/buffer
-// space toggle processing on/off, preserve mode
-// esc quit
-*/
class GLWinApp : public WinApp
{
if (wParam == '1')
{
set_mode(MODE_CPU);
- return 0;
+ return EXIT_SUCCESS;
}
if (wParam == '2')
{
set_mode(MODE_GPU);
- return 0;
+ return EXIT_SUCCESS;
}
else if (wParam == '9')
{
toggle_buffer();
- return 0;
+ return EXIT_SUCCESS;
}
else if (wParam == VK_SPACE)
{
m_demo_processing = !m_demo_processing;
- return 0;
+ return EXIT_SUCCESS;
}
else if (wParam == VK_ESCAPE)
{
cleanup();
- return 0;
+ return EXIT_SUCCESS;
}
break;
case WM_CLOSE:
cleanup();
- return 0;
+ return EXIT_SUCCESS;
case WM_DESTROY:
::PostQuitMessage(0);
- return 0;
+ return EXIT_SUCCESS;
}
return ::DefWindowProc(hWnd, message, wParam, lParam);
}
else
{
- return 0;
+ return EXIT_SUCCESS;
}
break;
case Expose:
}
break;
default:
- return 0;
+ return EXIT_SUCCESS;
}
return 1;
}
if (setup_pixel_format() != 0)
{
std::cerr << "Can't setup pixel format" << std::endl;
- return -1;
+ return EXIT_FAILURE;
}
m_hRC = wglCreateContext(m_hDC);
cv::ocl::Context::getDefault().device(0).name() :
(char*) "No OpenCL device";
- return 0;
+ return EXIT_SUCCESS;
} // init()
int get_frame(cv::ogl::Texture2D& texture, cv::ogl::Buffer& buffer, bool do_buffer)
{
if (!m_cap.read(m_frame_bgr))
- return -1;
+ return EXIT_FAILURE;
- cv::cvtColor(m_frame_bgr, m_frame_rgba, COLOR_RGB2RGBA);
+ cv::cvtColor(m_frame_bgr, m_frame_rgba, cv::COLOR_RGB2RGBA);
if (do_buffer)
buffer.copyFrom(m_frame_rgba, cv::ogl::Buffer::PIXEL_UNPACK_BUFFER, true);
else
texture.copyFrom(m_frame_rgba, true);
- return 0;
+ return EXIT_SUCCESS;
}
- void print_info(MODE mode, float time, cv::String& oclDevName)
+ void print_info(MODE mode, double time, cv::String& oclDevName)
{
#if defined(_WIN32)
HDC hDC = m_hDC;
try
{
if (m_shutdown)
- return 0;
+ return EXIT_SUCCESS;
int r;
cv::ogl::Texture2D texture;
r = get_frame(texture, buffer, do_buffer);
if (r != 0)
{
- return -1;
+ return EXIT_FAILURE;
}
switch (mode)
glXSwapBuffers(m_display, m_window);
#endif
- print_info(mode, m_timer.time(Timer::MSEC), m_oclDevName);
+ print_info(mode, m_timer.getTimeMilli(), m_oclDevName);
}
return 10;
}
- return 0;
+ return EXIT_SUCCESS;
}
protected:
{
cv::Mat m(m_height, m_width, CV_8UC4);
+ m_timer.reset();
m_timer.start();
if (do_buffer)
{
cv::UMat u;
+ m_timer.reset();
m_timer.start();
if (do_buffer)
int pfmt = ChoosePixelFormat(m_hDC, &pfd);
if (pfmt == 0)
- return -1;
+ return EXIT_FAILURE;
if (SetPixelFormat(m_hDC, pfmt, &pfd) == 0)
return -2;
- return 0;
+ return EXIT_SUCCESS;
}
#endif
cv::String m_oclDevName;
};
-static void help()
-{
- printf(
- "\nSample demonstrating interoperability of OpenGL and OpenCL with OpenCV.\n"
- "Hot keys: \n"
- " SPACE - turn processing on/off\n"
- " 1 - process GL data through OpenCV on CPU\n"
- " 2 - process GL data through OpenCV on GPU (via OpenCL)\n"
- " 9 - toggle use of GL texture/GL buffer\n"
- " ESC - exit\n\n");
-}
-
static const char* keys =
{
- "{c camera | true | use camera or not}"
+ "{c camera | 0 | camera id }"
"{f file | | movie file name }"
- "{h help | false | print help info }"
};
using namespace cv;
int main(int argc, char** argv)
{
cv::CommandLineParser parser(argc, argv, keys);
- bool useCamera = parser.get<bool>("camera");
+ int camera_id = parser.get<int>("camera");
string file = parser.get<string>("file");
- bool showHelp = parser.get<bool>("help");
- if (showHelp)
- {
- help();
- return 0;
- }
+ parser.about(
+ "\nA sample program demonstrating interoperability of OpenGL and OpenCL with OpenCV.\n\n"
+ "Hot keys: \n"
+ " SPACE - turn processing on/off\n"
+ " 1 - process GL data through OpenCV on CPU\n"
+ " 2 - process GL data through OpenCV on GPU (via OpenCL)\n"
+ " 9 - toggle use of GL texture/GL buffer\n"
+ " ESC - exit\n\n");
parser.printMessage();
cv::VideoCapture cap;
- if (useCamera)
- cap.open(0);
+ if (file.empty())
+ cap.open(camera_id);
else
cap.open(file.c_str());
if (!cap.isOpened())
{
printf("can not open camera or video file\n");
- return -1;
+ return EXIT_FAILURE;
}
int width = (int)cap.get(CAP_PROP_FRAME_WIDTH);
#define SAFE_RELEASE(p) if (p) { p->Release(); p = NULL; }
-class Timer
-{
-public:
- enum UNITS
- {
- USEC = 0,
- MSEC,
- SEC
- };
-
- Timer() : m_t0(0), m_diff(0)
- {
- m_tick_frequency = (float)cv::getTickFrequency();
-
- m_unit_mul[USEC] = 1000000;
- m_unit_mul[MSEC] = 1000;
- m_unit_mul[SEC] = 1;
- }
-
- void clear()
- {
- m_t0 = m_diff = 0;
- }
-
- void start()
- {
- m_t0 = cv::getTickCount();
- }
-
- void stop()
- {
- m_diff = cv::getTickCount() - m_t0;
- }
-
- float time(UNITS u = MSEC)
- {
- float sec = m_diff / m_tick_frequency;
-
- return sec * m_unit_mul[u];
- }
-
-public:
- float m_tick_frequency;
- int64 m_t0;
- int64 m_diff;
- int m_unit_mul[3];
-};
-
class WinApp
{
public:
virtual void idle() = 0;
#if defined(_WIN32)
- HINSTANCE m_hInstance;
- HWND m_hWnd;
+ HINSTANCE m_hInstance;
+ HWND m_hWnd;
#elif defined(__linux__)
- Display* m_display;
- XVisualInfo* m_visual_info;
- Window m_window;
- long m_event_mask;
- Atom m_WM_DELETE_WINDOW;
- bool m_end_loop;
+ Display* m_display;
+ XVisualInfo* m_visual_info;
+ Window m_window;
+ long m_event_mask;
+ Atom m_WM_DELETE_WINDOW;
+ bool m_end_loop;
#endif
- int m_width;
- int m_height;
- std::string m_window_name;
- Timer m_timer;
+ int m_width;
+ int m_height;
+ std::string m_window_name;
+ cv::TickMeter m_timer;
};