-Version: 1.3.54
+Version: 1.3.56
Maintainer: Yeong-Kyoon Lee<yeongkyoon.lee@samsung.com>
Source: emulator
extern void glo_context_destroy(GloContext *context);
/* Update the context in surface and free previous light-weight context */
-extern void glo_surface_update_context(GloSurface *surface, GloContext *context);
+extern int glo_surface_update_context(GloSurface *surface, GloContext *context);
/* Link the pixmap associated with surface as texture */
extern void glo_surface_as_texture(GloSurface *surface);
/* ------------------------------------------------------------------------ */
-/* Update the context in surface and free previous light-weight context */
-void glo_surface_update_context(GloSurface *surface, GloContext *context)
-{
- if ( surface->context && (surface->context->context == 0))
- qemu_free(surface->context);
+/* Update the context in surface and handle previous context */
+int glo_surface_update_context(GloSurface *surface, GloContext *context)
+ {
+ /* If the previous context is a light-weight context, just free it. If the
+ * previous context is a valid one bound to the surface via MakeCurrent, we
+ * need to unbind it from the original glstate */
+ int prev_context_valid = 0;
+
+ if ( surface->context )
+ {
+ prev_context_valid = (surface->context->context != 0);
+ if ( !prev_context_valid ) /* light-weight context */
+ g_free(surface->context);
+ }
surface->context = context;
+ return prev_context_valid;
}
+
/* Create a surface with given width and height, formatflags are from the
* GLO_ constants */
/* ------------------------------------------------------------------------ */
-/* Update the context in surface and free previous light-weight context */
-void glo_surface_update_context(GloSurface *surface, GloContext *context)
+/* Update the context in surface and handle previous context */
+int glo_surface_update_context(GloSurface *surface, GloContext *context)
{
- if ( surface->context && !surface->context->hDC)
- g_free(surface->context);
+ /* If the previous context is a light-weight context, just free it. If the\r
+ * previous context is a valid one bound to the surface via MakeCurrent, we\r
+ * need to unbind it from the original glstate */\r
+ int prev_context_valid = 0;
+
+ if ( surface->context )
+ {
+ prev_context_valid = (surface->context->hContext != 0);
+ if ( !prev_context_valid ) /* light-weight context */
+ g_free(surface->context);
+ }
surface->context = context;
+ return prev_context_valid;
}
/* Create a surface with given width and height, formatflags are from the
/* ------------------------------------------------------------------------ */
-/* Update the context in surface and free previous light-weight context */
-void glo_surface_update_context(GloSurface *surface, GloContext *context)
-{
- if ( surface->context && (surface->context->context == 0))
- g_free(surface->context);
+/* Update the context in surface and handle previous context */
+int glo_surface_update_context(GloSurface *surface, GloContext *context)
+ {
+ /* If the previous context is a light-weight context, just free it. If the
+ * previous context is a valid one bound to the surface via MakeCurrent, we
+ * need to unbind it from the original glstate */
+ int prev_context_valid = 0;
+
+ if ( surface->context )
+ {
+ prev_context_valid = (surface->context->context != 0);
+ if ( !prev_context_valid ) /* light-weight context */
+ g_free(surface->context);
+ }
surface->context = context;
+ return prev_context_valid;
}
/* Create a surface with given width and height, formatflags are from the
/* ----------------------------------------------------------------------------- */\r
/* Function prototype */\r
/* ----------------------------------------------------------------------------- */\r
-int marucam_device_check(void);\r
+int marucam_device_check(int log_flag);\r
void marucam_device_init(MaruCamState *state);\r
void marucam_device_open(MaruCamState *state);\r
void marucam_device_close(MaruCamState *state);\r
\r
#define CLEAR(x) memset(&(x), 0, sizeof(x))\r
\r
+static int yioctl(int fd, int req, void *arg)\r
+{\r
+ int r;\r
+\r
+ do {\r
+ r = ioctl(fd, req, arg);\r
+ } while ( r < 0 && errno == EINTR);\r
+\r
+ return r;\r
+}\r
+\r
static int xioctl(int fd, int req, void *arg)\r
{\r
int r;\r
return NULL;\r
}\r
\r
-int marucam_device_check(void)\r
+int marucam_device_check(int log_flag)\r
{\r
int tmp_fd;\r
+ struct timeval t1, t2;\r
struct stat st;\r
+ struct v4l2_fmtdesc format;\r
+ struct v4l2_frmsizeenum size;\r
struct v4l2_capability cap;\r
+ int ret = 0;\r
\r
+ gettimeofday(&t1, NULL);\r
if (stat(dev_name, &st) < 0) {\r
- INFO("<WARNING>Cannot identify '%s': %s\n", dev_name, strerror(errno));\r
+ fprintf(stdout, "[Webcam] <WARNING> Cannot identify '%s': %s\n",\r
+ dev_name, strerror(errno));\r
} else {\r
if (!S_ISCHR(st.st_mode)) {\r
- INFO("<WARNING>%s is no character device.\n", dev_name);\r
+ fprintf(stdout, "[Webcam] <WARNING>%s is no character device.\n",\r
+ dev_name);\r
}\r
}\r
\r
tmp_fd = open(dev_name, O_RDWR | O_NONBLOCK, 0);\r
if (tmp_fd < 0) {\r
- ERR("Camera device open failed.(%s)\n", dev_name);\r
- return 0;\r
+ fprintf(stdout, "[Webcam] Camera device open failed.(%s)\n", dev_name);\r
+ goto error;\r
}\r
if (ioctl(tmp_fd, VIDIOC_QUERYCAP, &cap) < 0) {\r
- ERR("Could not qeury video capabilities\n");\r
- close(tmp_fd);\r
- return 0;\r
+ fprintf(stdout, "[Webcam] Could not qeury video capabilities\n");\r
+ goto error;\r
}\r
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||\r
!(cap.capabilities & V4L2_CAP_STREAMING)) {\r
- ERR("Not supported video driver.\n");\r
- close(tmp_fd);\r
- return 0;\r
+ fprintf(stdout, "[Webcam] Not supported video driver.\n");\r
+ goto error;\r
}\r
+ ret = 1;\r
\r
+ if (log_flag) {\r
+ fprintf(stdout, "[Webcam] Driver : %s\n", cap.driver);\r
+ fprintf(stdout, "[Webcam] Card : %s\n", cap.card);\r
+ fprintf(stdout, "[Webcam] Bus info : %s\n", cap.bus_info);\r
+\r
+ CLEAR(format);\r
+ format.index = 0;\r
+ format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;\r
+\r
+ if (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) < 0) {\r
+ goto error;\r
+ }\r
+\r
+ do {\r
+ CLEAR(size);\r
+ size.index = 0;\r
+ size.pixel_format = format.pixelformat;\r
+\r
+ fprintf(stdout, "[Webcam] PixelFormat : %c%c%c%c\n",\r
+ (char)(format.pixelformat),\r
+ (char)(format.pixelformat >> 8),\r
+ (char)(format.pixelformat >> 16),\r
+ (char)(format.pixelformat >> 24));\r
+\r
+ if (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) {\r
+ goto error;\r
+ }\r
+\r
+ if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) {\r
+ do {\r
+ fprintf(stdout, "[Webcam] got discrete frame size %dx%d\n",\r
+ size.discrete.width, size.discrete.height);\r
+ size.index++;\r
+ } while (yioctl(tmp_fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0);\r
+ } else if (size.type == V4L2_FRMSIZE_TYPE_STEPWISE) {\r
+ fprintf(stdout, "[Webcam] we have stepwise frame sizes:\n");\r
+ fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",\r
+ size.stepwise.min_width, size.stepwise.min_height);\r
+ fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",\r
+ size.stepwise.max_width, size.stepwise.max_height);\r
+ fprintf(stdout, "[Webcam] step width: %d, step height: %d\n",\r
+ size.stepwise.step_width, size.stepwise.step_height);\r
+ } else if (size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) {\r
+ fprintf(stdout, "[Webcam] we have continuous frame sizes:\n");\r
+ fprintf(stdout, "[Webcam] min width: %d, min height: %d\n",\r
+ size.stepwise.min_width, size.stepwise.min_height);\r
+ fprintf(stdout, "[Webcam] max width: %d, max height: %d\n",\r
+ size.stepwise.max_width, size.stepwise.max_height);\r
+\r
+ }\r
+ format.index++;\r
+ } while (yioctl(tmp_fd, VIDIOC_ENUM_FMT, &format) >= 0);\r
+ }\r
+error:\r
close(tmp_fd);\r
- return 1;\r
+ gettimeofday(&t2, NULL);\r
+ fprintf(stdout, "[Webcam] Elapsed time : %lu:%06lu\n",\r
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
+ return ret;\r
}\r
\r
void marucam_device_init(MaruCamState* state)\r
struct timespec req;\r
req.tv_sec = 0;\r
req.tv_nsec = 10000000;\r
-\r
- INFO("Starting preview!\n");\r
+ INFO("Pixfmt(%c%c%c%C), W:H(%d:%d), buf size(%u)\n",\r
+ (char)(dst_fmt.fmt.pix.pixelformat),\r
+ (char)(dst_fmt.fmt.pix.pixelformat >> 8),\r
+ (char)(dst_fmt.fmt.pix.pixelformat >> 16),\r
+ (char)(dst_fmt.fmt.pix.pixelformat >> 24),\r
+ dst_fmt.fmt.pix.width,\r
+ dst_fmt.fmt.pix.height,\r
+ dst_fmt.fmt.pix.sizeimage);\r
+ INFO("Starting preview\n");\r
qemu_mutex_lock(&state->thread_mutex);\r
qemu_cond_signal(&state->thread_cond);\r
qemu_mutex_unlock(&state->thread_mutex);\r
while (!is_stream_paused(state))\r
nanosleep(&req, NULL);\r
\r
- INFO("Stopping preview!\n");\r
+ INFO("Stopping preview\n");\r
}\r
\r
void marucam_device_s_param(MaruCamState* state)\r
{\r
HWCInPin *This = impl_from_IPin(iface);\r
\r
- if (IsEqualIID(riid, &IID_IUnknown) || IsEqualIID(riid, &IID_IPin)) {\r
- *ppv = &This->IPin_iface;\r
+ if (IsEqualIID(riid, &IID_IUnknown)) {\r
+ *ppv = (IUnknown *)(&This->IPin_iface);\r
+ IPin_AddRef((IPin*)*ppv);\r
+ } else if (IsEqualIID(riid, &IID_IPin)) {\r
+ *ppv = (IPin *)(&This->IPin_iface);\r
IPin_AddRef((IPin*)*ppv);\r
} else if (IsEqualIID(riid, &IID_IMemInputPin)) {\r
- *ppv = &This->IMemInputPin_iface;\r
+ *ppv = (IMemInputPin *)(&This->IMemInputPin_iface);\r
IPin_AddRef((IMemInputPin*)*ppv);\r
} else {\r
*ppv = NULL;\r
SAFE_RELEASE(This->m_pConnectedPin);\r
}\r
if (This->m_pAllocator) {\r
+ IMemAllocator_Decommit(This->m_pAllocator);\r
SAFE_RELEASE(This->m_pAllocator);\r
}\r
g_free((void*)This);\r
\r
static STDMETHODIMP HWCPin_Connect(IPin *iface, IPin *pReceivePin, const AM_MEDIA_TYPE *pmt)\r
{\r
+ HWCInPin *This = impl_from_IPin(iface);\r
+\r
+ if (!pReceivePin) {\r
+ return E_POINTER;\r
+ }\r
+\r
+ if (This->m_pConnectedPin) {\r
+ return VFW_E_ALREADY_CONNECTED;\r
+ }\r
+\r
if (!pmt)\r
return S_OK;\r
return S_FALSE;\r
HWCInPin *This = impl_from_IPin(iface);\r
\r
HRESULT hr;\r
+ FILTER_STATE fs;\r
+ IBaseFilter_GetState(This->m_pCFilter, 0, &fs);\r
+ if (fs != State_Stopped) {\r
+ return VFW_E_NOT_STOPPED;\r
+ }\r
if (This->m_pConnectedPin == NULL) {\r
hr = S_FALSE;\r
} else {\r
+ if (This->m_pAllocator) {\r
+ hr = IMemAllocator_Decommit(This->m_pAllocator);\r
+ if (FAILED(hr)) {\r
+ return hr;\r
+ }\r
+ SAFE_RELEASE(This->m_pAllocator);\r
+ }\r
SAFE_RELEASE(This->m_pConnectedPin);\r
hr = S_OK;\r
}\r
return hr;\r
}\r
\r
-static STDMETHODIMP HWCPin_ConnectedTo(IPin *iface, IPin **pPin)\r
+static STDMETHODIMP HWCPin_ConnectedTo(IPin *iface, IPin **ppPin)\r
{\r
HWCInPin *This = impl_from_IPin(iface);\r
\r
- if (pPin == NULL)\r
+ if (ppPin == NULL)\r
return E_POINTER;\r
\r
if (This->m_pConnectedPin == NULL) {\r
+ *ppPin = NULL;\r
return VFW_E_NOT_CONNECTED;\r
} else {\r
- *pPin = This->m_pConnectedPin;\r
+ *ppPin = This->m_pConnectedPin;\r
IPin_AddRef(This->m_pConnectedPin);\r
}\r
return S_OK;\r
{\r
HWCInPin *This = impl_from_IMemInputPin(iface);\r
\r
- if (IsEqualIID(riid, &IID_IUnknown) || IsEqualIID(riid, &IID_IMemInputPin)) {\r
- *ppv = &This->IMemInputPin_iface;\r
- IMemInputPin_AddRef((IMemInputPin*)*ppv);\r
+ if (IsEqualIID(riid, &IID_IUnknown)) {\r
+ *ppv = (IUnknown *)(&This->IMemInputPin_iface);\r
+ IPin_AddRef((IPin*)*ppv);\r
} else if (IsEqualIID(riid, &IID_IPin)) {\r
- *ppv = &This->IPin_iface;\r
+ *ppv = (IPin *)(&This->IPin_iface);\r
IPin_AddRef((IPin*)*ppv);\r
+ } else if (IsEqualIID(riid, &IID_IMemInputPin)) {\r
+ *ppv = (IMemInputPin *)(&This->IMemInputPin_iface);\r
+ IPin_AddRef((IMemInputPin*)*ppv);\r
} else {\r
*ppv = NULL;\r
return E_NOINTERFACE;\r
SAFE_RELEASE(This->m_pConnectedPin);\r
}\r
if (This->m_pAllocator) {\r
+ IMemAllocator_Decommit(This->m_pAllocator);\r
SAFE_RELEASE(This->m_pAllocator);\r
}\r
g_free((void*)This);\r
return hr;\r
}\r
\r
-int marucam_device_check(void)\r
+static char* __wchar_to_char(const WCHAR *pwstr)\r
{\r
+ char *pstr = NULL;\r
+ int len = 0;\r
+\r
+ len = wcslen(pwstr) + 1;\r
+ pstr = (char *)g_malloc0(sizeof(char) * len);\r
+ wcstombs(pstr, pwstr, len + 1);\r
+\r
+ return pstr;\r
+}\r
+\r
+int marucam_device_check(int log_flag)\r
+{\r
+ struct timeval t1, t2;\r
int ret = 0;\r
+ char *device_name = NULL;\r
HRESULT hr = E_FAIL;\r
ICreateDevEnum *pCreateDevEnum = NULL;\r
+ IGraphBuilder *pGB = NULL;\r
+ ICaptureGraphBuilder2 *pCGB = NULL;\r
+ IBaseFilter *pSrcFilter = NULL;\r
IEnumMoniker *pEnumMK = NULL;\r
IMoniker *pMoniKer = NULL;\r
+ IAMStreamConfig *pSConfig = NULL;\r
+ int iCount = 0, iSize = 0;\r
\r
+ gettimeofday(&t1, NULL);\r
hr = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);\r
if (FAILED(hr)) {\r
- ERR("[%s] failed to CoInitailizeEx\n", __func__);\r
- goto error;\r
+ fprintf(stdout, "[Webcam] failed to CoInitailizeEx\n");\r
+ goto leave_check;\r
}\r
\r
- hr = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC, &IID_ICreateDevEnum, (void**)&pCreateDevEnum);\r
+ hr = CoCreateInstance(&CLSID_FilterGraph, NULL,\r
+ CLSCTX_INPROC,\r
+ &IID_IGraphBuilder,\r
+ (void**)&pGB);\r
if (FAILED(hr)) {\r
- ERR("[%s] failed to create instance of CLSID_SystemDeviceEnum\n", __func__);\r
- goto error;\r
+ fprintf(stdout, "[Webcam] Failed to create GraphBuilder, 0x%x\n", hr);\r
+ goto leave_check;\r
}\r
\r
- hr = pCreateDevEnum->lpVtbl->CreateClassEnumerator(pCreateDevEnum, &CLSID_VideoInputDeviceCategory, &pEnumMK, 0);\r
- if (FAILED(hr))\r
- {\r
- ERR("[%s] failed to create class enumerator\n", __func__);\r
- goto error;\r
+ hr = CoCreateInstance(&CLSID_CaptureGraphBuilder2, NULL,\r
+ CLSCTX_INPROC,\r
+ &IID_ICaptureGraphBuilder2,\r
+ (void**)&pCGB);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout,\r
+ "[Webcam] Failed to create CaptureGraphBuilder2, 0x%x\n", hr);\r
+ goto leave_check;\r
}\r
\r
- if (!pEnumMK)\r
- {\r
- ERR("[%s] class enumerator is NULL!!\n", __func__);\r
- goto error;\r
+ hr = pCGB->lpVtbl->SetFiltergraph(pCGB, pGB);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout, "[Webcam] Failed to SetFiltergraph, 0x%x\n", hr);\r
+ goto leave_check;\r
+ }\r
+\r
+ hr = CoCreateInstance(&CLSID_SystemDeviceEnum, NULL,\r
+ CLSCTX_INPROC,\r
+ &IID_ICreateDevEnum,\r
+ (void**)&pCreateDevEnum);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout,\r
+ "[Webcam] failed to create instance of CLSID_SystemDeviceEnum\n");\r
+ goto leave_check;\r
+ }\r
+\r
+ hr = pCreateDevEnum->lpVtbl->CreateClassEnumerator(pCreateDevEnum,\r
+ &CLSID_VideoInputDeviceCategory, &pEnumMK, 0);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout, "[Webcam] failed to create class enumerator\n");\r
+ goto leave_check;\r
+ }\r
+\r
+ if (!pEnumMK) {\r
+ fprintf(stdout, "[Webcam] class enumerator is NULL!!\n");\r
+ goto leave_check;\r
}\r
pEnumMK->lpVtbl->Reset(pEnumMK);\r
\r
hr = pEnumMK->lpVtbl->Next(pEnumMK, 1, &pMoniKer, NULL);\r
- if (hr == S_FALSE)\r
+ if (FAILED(hr) || (hr == S_FALSE)) {\r
+ fprintf(stdout, "[Webcam] enum moniker returns a invalid value.\n");\r
+ goto leave_check;\r
+ }\r
+\r
+ IPropertyBag *pBag = NULL;\r
+ hr = pMoniKer->lpVtbl->BindToStorage(pMoniKer, 0, 0,\r
+ &IID_IPropertyBag,\r
+ (void **)&pBag);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout, "[Webcam] failed to bind to storage.\n");\r
+ goto leave_check;\r
+ } else {\r
+ VARIANT var;\r
+ var.vt = VT_BSTR;\r
+ hr = pBag->lpVtbl->Read(pBag, L"FriendlyName", &var, NULL);\r
+ if (hr == S_OK) {\r
+ ret = 1;\r
+ if (!log_flag) {\r
+ SysFreeString(var.bstrVal);\r
+ SAFE_RELEASE(pBag);\r
+ SAFE_RELEASE(pMoniKer);\r
+ goto leave_check;\r
+ }\r
+ device_name = __wchar_to_char(var.bstrVal);\r
+ fprintf(stdout, "[Webcam] Device name : %s\n", device_name);\r
+ g_free(device_name);\r
+ hr = pMoniKer->lpVtbl->BindToObject(pMoniKer, NULL, NULL,\r
+ &IID_IBaseFilter,\r
+ (void**)&pSrcFilter);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout,\r
+ "[Webcam] Counldn't bind moniker to filter object!!\n");\r
+ goto leave_check;\r
+ } else {\r
+ pSrcFilter->lpVtbl->AddRef(pSrcFilter);\r
+ }\r
+ SysFreeString(var.bstrVal);\r
+ }\r
+ SAFE_RELEASE(pBag);\r
+ }\r
+ SAFE_RELEASE(pMoniKer);\r
+\r
+ hr = pGB->lpVtbl->AddFilter(pGB, pSrcFilter, L"Video Capture");\r
+ if (hr != S_OK && hr != S_FALSE) {\r
+ fprintf(stdout,\r
+ "[Webcam] Counldn't add Video Capture filter to our graph!\n");\r
+ goto leave_check;\r
+ }\r
+\r
+ hr = pCGB->lpVtbl->FindInterface(pCGB, &PIN_CATEGORY_CAPTURE, 0,\r
+ pSrcFilter, &IID_IAMStreamConfig,\r
+ (void**)&pSConfig);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout, "[Webcam] failed to FindInterface method\n");\r
+ goto leave_check;\r
+ }\r
+\r
+ hr = pSConfig->lpVtbl->GetNumberOfCapabilities(pSConfig, &iCount, &iSize);\r
+ if (FAILED(hr))\r
{\r
- ERR("[%s] enum moniker returns a invalid value.\n", __func__);\r
- hr = E_FAIL;\r
+ fprintf(stdout, "[Webcam] failed to GetNumberOfCapabilities method\n");\r
+ goto leave_check;\r
}\r
- if (SUCCEEDED(hr))\r
+\r
+ if (iSize == sizeof(VIDEO_STREAM_CONFIG_CAPS))\r
{\r
- IPropertyBag *pBag = NULL;\r
- hr = pMoniKer->lpVtbl->BindToStorage(pMoniKer, 0, 0, &IID_IPropertyBag, (void **)&pBag);\r
- if (SUCCEEDED(hr))\r
+ int iFormat = 0;\r
+ for (iFormat = 0; iFormat < iCount; iFormat++)\r
{\r
- VARIANT var;\r
- var.vt = VT_BSTR;\r
- hr = pBag->lpVtbl->Read(pBag, L"FriendlyName", &var, NULL);\r
- if (hr == NOERROR)\r
+ VIDEO_STREAM_CONFIG_CAPS scc;\r
+ AM_MEDIA_TYPE *pmtConfig;\r
+\r
+ hr = pSConfig->lpVtbl->GetStreamCaps(pSConfig, iFormat, &pmtConfig,\r
+ (BYTE*)&scc);\r
+ if (hr == S_OK)\r
{\r
- ret = 1;\r
- SysFreeString(var.bstrVal);\r
+ if (IsEqualIID(&pmtConfig->formattype, &FORMAT_VideoInfo))\r
+ {\r
+ VIDEOINFOHEADER *pvi =\r
+ (VIDEOINFOHEADER *)pmtConfig->pbFormat;\r
+ if (pvi->bmiHeader.biCompression == BI_RGB) {\r
+ fprintf(stdout, "[Webcam] RGB BitCount: %d, %ux%u\n",\r
+ pvi->bmiHeader.biBitCount,\r
+ pvi->bmiHeader.biWidth,\r
+ pvi->bmiHeader.biHeight);\r
+ } else {\r
+ fprintf(stdout,\r
+ "[Webcam] PixelFormat: %c%c%c%c, %ux%u\n",\r
+ (char)(pvi->bmiHeader.biCompression),\r
+ (char)(pvi->bmiHeader.biCompression >> 8),\r
+ (char)(pvi->bmiHeader.biCompression >> 16),\r
+ (char)(pvi->bmiHeader.biCompression >> 24),\r
+ pvi->bmiHeader.biWidth,\r
+ pvi->bmiHeader.biHeight);\r
+ }\r
+ }\r
+ DeleteMediaType(pmtConfig);\r
}\r
- SAFE_RELEASE(pBag);\r
}\r
- SAFE_RELEASE(pMoniKer);\r
}\r
\r
-error:\r
+ hr = pGB->lpVtbl->RemoveFilter(pGB, pSrcFilter);\r
+ if (FAILED(hr)) {\r
+ fprintf(stdout, "[Webcam] Failed to remove source filer. 0x%x\n", hr);\r
+ }\r
+\r
+leave_check:\r
+ SAFE_RELEASE(pSConfig);\r
+ SAFE_RELEASE(pSrcFilter);\r
+ SAFE_RELEASE(pCGB);\r
+ SAFE_RELEASE(pGB);\r
SAFE_RELEASE(pEnumMK);\r
SAFE_RELEASE(pCreateDevEnum);\r
CoUninitialize();\r
+ gettimeofday(&t2, NULL);\r
+ fprintf(stdout, "[Webcam] Elapsed time : %lu.%06lu\n",\r
+ t2.tv_sec-t1.tv_sec, t2.tv_usec-t1.tv_usec);\r
+\r
return ret;\r
}\r
\r
goto error_failed;\r
}\r
\r
- INFO("Open successfully!!!\n");\r
+ INFO("Opened\n");\r
return;\r
\r
error_failed:\r
RemoveFilters();\r
CloseInterfaces();\r
CoUninitialize();\r
- INFO("Close successfully!!!\n");\r
+ INFO("Closed\n");\r
}\r
\r
/* MARUCAM_CMD_START_PREVIEW */\r
pixfmt = supported_dst_pixfmts[cur_fmt_idx].fmt;\r
state->buf_size = get_sizeimage(pixfmt, width, height);\r
\r
- INFO("Pixfmt(0x%x), Width:Height(%d:%d), buffer size(%u)\n",\r
- pixfmt, width, height, state->buf_size);\r
+ INFO("Pixfmt(%c%c%c%c), W:H(%d:%d), buf size(%u)\n",\r
+ (char)(pixfmt), (char)(pixfmt >> 8),\r
+ (char)(pixfmt >> 16), (char)(pixfmt >> 24),\r
+ width, height, state->buf_size);\r
+ INFO("Starting preview\n");\r
\r
assert(g_pCallback != NULL);\r
hr = ((HWCInPin*)g_pInputPin)->SetGrabCallbackIF(g_pInputPin, g_pCallback);\r
state->streamon = 1;\r
qemu_mutex_unlock(&state->thread_mutex);\r
\r
- INFO("Start preview!!!\n");\r
+ INFO("Streaming on ......\n");\r
}\r
\r
/* MARUCAM_CMD_STOP_PREVIEW */\r
MaruCamParam *param = state->param;\r
param->top = 0;\r
\r
+ INFO("...... Streaming off\n");\r
qemu_mutex_lock(&state->thread_mutex);\r
state->streamon = 0;\r
qemu_mutex_unlock(&state->thread_mutex);\r
\r
state->buf_size = 0;\r
\r
- INFO("Stop preview!!!\n");\r
+ INFO("Stopping preview\n");\r
}\r
\r
/* MARUCAM_CMD_S_PARAM */\r
#define PCI_DEVICE_ID_VIRTUAL_BRIGHTNESS 0x1014
#define PCI_DEVICE_ID_VIRTUAL_CAMERA 0x1018
#define PCI_DEVICE_ID_VIRTUAL_CODEC 0x101C
+// Device ID 0x1000 through 0x103F inclusive is a virtio device
#define PCI_DEVICE_ID_VIRTIO_TOUCHSCREEN 0x101D
-/* virtio */
-#define VIRTIO_ID_TOUCHSCREEN 10
+/* Virtio */
+/*
++----------------------+--------------------+---------------+
+| Subsystem Device ID | Virtio Device | Specification |
++----------------------+--------------------+---------------+
++----------------------+--------------------+---------------+
+| 1 | network card | Appendix C |
++----------------------+--------------------+---------------+
+| 2 | block device | Appendix D |
++----------------------+--------------------+---------------+
+| 3 | console | Appendix E |
++----------------------+--------------------+---------------+
+| 4 | entropy source | Appendix F |
++----------------------+--------------------+---------------+
+| 5 | memory ballooning | Appendix G |
++----------------------+--------------------+---------------+
+| 6 | ioMemory | - |
++----------------------+--------------------+---------------+
+| 7 | rpmsg | Appendix H |
++----------------------+--------------------+---------------+
+| 8 | SCSI host | Appendix I |
++----------------------+--------------------+---------------+
+| 9 | 9P transport | - |
++----------------------+--------------------+---------------+
+| 10 | mac80211 wlan | - |
++----------------------+--------------------+---------------+
+*/
+#define VIRTIO_ID_TOUCHSCREEN 11
#endif /* MARU_DEVICE_IDS_H_ */
state->current_qsurface = qsurface;
}
+/* Unbind a qsurface from a context (GLState) */
+static void unbind_qsurface(GLState *state,
+ QGloSurface *qsurface)
+{
+ qsurface->glstate = NULL;
+
+ QTAILQ_REMOVE(&state->qsurfaces, qsurface, next);
+
+ if ( state->current_qsurface == qsurface )
+ state->current_qsurface = NULL;
+}
+
/* Find the qsurface with required drawable in active & pending qsurfaces */
QGloSurface* find_qsurface_from_client_drawable(ProcessState *process, ClientGLXDrawable client_drawable)
{
/* process->pending_qsurfaces[i] = NULL;*/
qsurface->ref = 1;
/* qsurface->surface->context = glstate->context;*/
- glo_surface_update_context(qsurface->surface, glstate->context);
+ if ( glo_surface_update_context(qsurface->surface, glstate->context) )
+ unbind_qsurface(qsurface->glstate, qsurface);
bind_qsurface(glstate, qsurface);
return 1;
}
void qemu_ds_shm_refresh(DisplayState *ds)
{
- //TODO:
+ vga_hw_update();
}
void maruskin_shm_init(uint64 swt_handle, int lcd_size_width, int lcd_size_height, bool is_resize)
<target name="make-properties">
<echo message="make properties..." />
<tstamp>
- <format property="build_time" pattern="yyyyMMdd-hhmm" timezone="GMT+9" locale="en" />
+ <format property="build_time" pattern="yyyyMMdd-HHmm" timezone="GMT+9" locale="en" />
</tstamp>
<exec executable="cat" outputproperty="version">
<arg value="../../VERSION" />
return;
}
- ProcessBuilder procBrowser = new ProcessBuilder();
-
- if (SwtUtil.isLinuxPlatform()) {
- procBrowser.command("nautilus", "--browser", openPath);
- } else if (SwtUtil.isWindowsPlatform()) {
- procBrowser.command("explorer", "\"" + openPath + "\"");
- } else if (SwtUtil.isMacPlatform()) {
- //TODO:
- logger.warning( "not supported yet" );
- }
-
- if (procBrowser.command().isEmpty() == false) {
- try {
- procBrowser.start();
- } catch (Exception e) {
- logger.log( Level.SEVERE, e.getMessage(), e);
- }
- }
-
- if (openPath.compareTo(VALUE_NONE) == 0 || openPath.compareTo("") == 0) {
- return;
- }
-
Program.launch(openPath);
/*ProcessBuilder procBrowser = new ProcessBuilder();
switch ( rotation_type ) {
case ROTATION_PORTRAIT:
- sprintf( send_buf, "1\n3\n0\n-9.80665\n0\n" );
+ sprintf( send_buf, "1\n3\n0\n9.80665\n0\n" );
break;
case ROTATION_LANDSCAPE:
- sprintf( send_buf, "1\n3\n-9.80665\n0\n0\n" );
+ sprintf( send_buf, "1\n3\n9.80665\n0\n0\n" );
break;
case ROTATION_REVERSE_PORTRAIT:
- sprintf( send_buf, "1\n3\n0\n9.80665\n0\n" );
+ sprintf( send_buf, "1\n3\n0\n-9.80665\n0\n" );
break;
case ROTATION_REVERSE_LANDSCAPE:
- sprintf(send_buf, "1\n3\n9.80665\n0\n0\n");
+ sprintf(send_buf, "1\n3\n-9.80665\n0\n0\n");
break;
default:
int enable_gl = 0;
int capability_check_gl = 0;
#endif
-#if defined(CONFIG_MARU)
-extern int marucam_device_check(void);
+#if defined(CONFIG_MARU) && (!defined(CONFIG_DARWIN))
+#define WEBCAM_INFO_IGNORE 0x00
+#define WEBCAM_INFO_WRITE 0x04
+extern int marucam_device_check(int log_flag);
int is_webcam_enabled = 0;
#endif
}
#endif
#endif
-#if defined(CONFIG_MARU)
+#if defined(CONFIG_MARU) && (!defined(CONFIG_DARWIN))
if (!is_webcam_enabled) {
const char *driver = qemu_opt_get(opts, "driver");
if (driver && (strcmp (driver, MARUCAM_DEV_NAME) == 0)) {
#endif
#endif
-#if defined(CONFIG_MARU)
- is_webcam_enabled = marucam_device_check();
+#if defined(CONFIG_MARU) && (!defined(CONFIG_DARWIN))
+ is_webcam_enabled = marucam_device_check(WEBCAM_INFO_WRITE);
if (!is_webcam_enabled) {
- fprintf (stderr, "WARNING: Webcam support was disabled due to "
- "the fail of webcam capability check!\n");
+ fprintf (stderr, "[Webcam] <WARNING> Webcam support was disabled "
+ "due to the fail of webcam capability check!\n");
}
gchar *tmp_cam_kcmd = kernel_cmdline;
kernel_cmdline = g_strdup_printf("%s enable_cam=%d", tmp_cam_kcmd, is_webcam_enabled);
- fprintf(stdout, "kernel command : %s\n", kernel_cmdline);
g_free(tmp_cam_kcmd);
if (is_webcam_enabled) {
device_opt_finding_t devp = {MARUCAM_DEV_NAME, 0};
qemu_opts_foreach(qemu_find_opts("device"), find_device_opt, &devp, 0);
if (devp.found == 0) {
- if (!qemu_opts_parse(qemu_find_opts("device"), MARUCAM_DEV_NAME, "driver")) {
+ if (!qemu_opts_parse(qemu_find_opts("device"), MARUCAM_DEV_NAME, 1)) {
+ fprintf(stderr, "Failed to initialize the marucam device.\n");
exit(1);
}
}
+ fprintf(stdout, "[Webcam] Webcam support was enabled.\n");
}
#endif