From f94edf971b9fbde5945d4d76cca5bf79b34e5863 Mon Sep 17 00:00:00 2001 From: Jinkun Jang Date: Wed, 13 Mar 2013 01:36:37 +0900 Subject: [PATCH] Tizen 2.1 base --- AUTHORS | 1 + Makefile.am | 2 - README | 2 +- autogen.sh | 4 +- configure.ac | 38 +- docs/Makefile.am | 4 - docs/reference/Makefile.am | 2 +- docs/reference/gst-mfld-videosrc/Makefile.am | 94 + .../gst-mfld-videosrc/gst-mfld-videosrc-docs.sgml | 46 + .../gst-mfld-videosrc-sections.txt | 101 + .../gst-mfld-videosrc/gst-mfld-videosrc.types | 6 + gst-libs/Makefile.am | 2 +- gst-libs/atomisphal/Makefile.am | 25 + gst-libs/atomisphal/atomisp.h | 751 ++++ gst-libs/atomisphal/atomisp_v4l2.h | 58 + gst-libs/atomisphal/gstv4l2mfldadvci.c | 433 +++ gst-libs/atomisphal/gstv4l2mfldadvci.h | 285 ++ gst-libs/atomisphal/mfld_cam.c | 2039 +++++++++++ gst-libs/atomisphal/mfld_cam.h | 308 ++ gst-libs/atomisphal/mfld_cam_defs.h | 36 + gst-libs/atomisphal/mfld_driver.c | 788 ++++ gst-libs/atomisphal/mfld_driver.h | 122 + gst-libs/atomisphal/sh_css_types.h | 798 ++++ gst-libs/gst/camera/Makefile.am | 24 +- gst-libs/gst/camera/gstmfldcameracolorbalance.c | 130 + gst-libs/gst/camera/gstmfldcameracolorbalance.h | 155 + gst-libs/gst/camera/gstmfldcameracontroliface.c | 701 ++++ gst-libs/gst/camera/gstmfldcameracontroliface.h | 437 +++ gst-libs/gst/camera/gstmfldcameraphotoiface.c | 1460 ++++++++ gst-libs/gst/camera/gstmfldcameraphotoiface.h | 338 ++ gst-libs/gst/camera/gstmfldcamerasrc.c | 2671 ++++++++++++++ gst-libs/gst/camera/gstmfldcamerasrc.h | 484 +++ gst/Makefile.am | 4 +- gst/mfldv4l2cam/Makefile.am | 32 + gst/mfldv4l2cam/gstv4l2camsrc.c | 1874 ++++++++++ gst/mfldv4l2cam/gstv4l2camsrc.h | 319 ++ gst/mfldv4l2cam/gstv4l2camvidorient.c | 111 + gst/mfldv4l2cam/gstv4l2camvidorient.h | 120 + gst/mfldv4l2cam/v4l2camsrc_calls.c | 3791 ++++++++++++++++++++ gst/mfldv4l2cam/v4l2camsrc_calls.h | 141 + packaging/gst-plugins-atomisp.changes | 364 ++ packaging/gst-plugins-atomisp.spec | 53 + pkgconfig/Makefile.am | 6 +- pkgconfig/gstreamer-atomisphal.pc.in | 11 + pkgconfig/gstreamer-mfld-videosrc.pc.in | 11 + update/install.sh | 9 + update/update_daily.sh | 16 + 47 files changed, 19176 insertions(+), 31 deletions(-) create mode 100644 docs/reference/gst-mfld-videosrc/Makefile.am create mode 100644 docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-docs.sgml create mode 100644 docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-sections.txt create mode 100644 docs/reference/gst-mfld-videosrc/gst-mfld-videosrc.types create mode 100644 gst-libs/atomisphal/Makefile.am create mode 100644 gst-libs/atomisphal/atomisp.h create mode 100644 gst-libs/atomisphal/atomisp_v4l2.h create mode 100644 gst-libs/atomisphal/gstv4l2mfldadvci.c create mode 100644 gst-libs/atomisphal/gstv4l2mfldadvci.h create mode 100644 gst-libs/atomisphal/mfld_cam.c create mode 100644 gst-libs/atomisphal/mfld_cam.h create mode 100644 gst-libs/atomisphal/mfld_cam_defs.h create mode 100644 gst-libs/atomisphal/mfld_driver.c create mode 100644 gst-libs/atomisphal/mfld_driver.h create mode 100644 gst-libs/atomisphal/sh_css_types.h create mode 100644 gst-libs/gst/camera/gstmfldcameracolorbalance.c create mode 100644 gst-libs/gst/camera/gstmfldcameracolorbalance.h create mode 100644 gst-libs/gst/camera/gstmfldcameracontroliface.c create mode 100644 gst-libs/gst/camera/gstmfldcameracontroliface.h create mode 100644 gst-libs/gst/camera/gstmfldcameraphotoiface.c create mode 100644 gst-libs/gst/camera/gstmfldcameraphotoiface.h create mode 100644 gst-libs/gst/camera/gstmfldcamerasrc.c create mode 100644 
gst-libs/gst/camera/gstmfldcamerasrc.h create mode 100644 gst/mfldv4l2cam/Makefile.am create mode 100644 gst/mfldv4l2cam/gstv4l2camsrc.c create mode 100644 gst/mfldv4l2cam/gstv4l2camsrc.h create mode 100644 gst/mfldv4l2cam/gstv4l2camvidorient.c create mode 100644 gst/mfldv4l2cam/gstv4l2camvidorient.h create mode 100644 gst/mfldv4l2cam/v4l2camsrc_calls.c create mode 100644 gst/mfldv4l2cam/v4l2camsrc_calls.h create mode 100644 packaging/gst-plugins-atomisp.changes create mode 100644 packaging/gst-plugins-atomisp.spec create mode 100644 pkgconfig/gstreamer-atomisphal.pc.in create mode 100644 pkgconfig/gstreamer-mfld-videosrc.pc.in create mode 100755 update/install.sh create mode 100755 update/update_daily.sh diff --git a/AUTHORS b/AUTHORS index cdaf361..ef33ed4 100644 --- a/AUTHORS +++ b/AUTHORS @@ -1 +1,2 @@ Nokia Corporation +Intel Corporation diff --git a/Makefile.am b/Makefile.am index a963700..a030352 100644 --- a/Makefile.am +++ b/Makefile.am @@ -1,3 +1 @@ -DISTCHECK_CONFIGURE_FLAGS=--enable-gtk-doc - SUBDIRS = m4 gst-libs gst docs pkgconfig diff --git a/README b/README index 0ec82e9..003f11b 100644 --- a/README +++ b/README @@ -1 +1 @@ -This is Medfield Camera Source element baseed on gst-nokia-videosrc. +This is Medfield Camera Source element based on gst-nokia-videosrc. diff --git a/autogen.sh b/autogen.sh index 701906a..a150de3 100755 --- a/autogen.sh +++ b/autogen.sh @@ -3,7 +3,7 @@ # to the right versions, or leave them unset and get the RedHat 7.3 defaults DIE=0 -package=gst-nokia-videosrc +package=gst-mfld-videosrc # autogen.sh helper functions (copied from GStreamer's common/ CVS module) if test ! -f ./gst-autogen.sh; @@ -16,7 +16,7 @@ then fi . ./gst-autogen.sh -CONFIGURE_DEF_OPT='--enable-maintainer-mode --enable-debug --enable-gtk-doc' +CONFIGURE_DEF_OPT='--enable-maintainer-mode --enable-debug' autogen_options $@ diff --git a/configure.ac b/configure.ac index c303209..33f463c 100644 --- a/configure.ac +++ b/configure.ac @@ -4,6 +4,7 @@ dnl versions of gstreamer and plugins-base GST_MAJORMINOR=0.10 GST_REQUIRED=0.10.0 GSTPB_REQUIRED=0.10.0 +LIBMFLD_REQUIRED=0.10.0 dnl fill in your package name and version here dnl the fourth (nano) number should be 0 for a release, 1 for CVS, @@ -30,7 +31,7 @@ AC_PROG_CC AC_PROG_LIBTOOL # check for gtk-doc -GTK_DOC_CHECK(1.6) +# GTK_DOC_CHECK(1.6) dnl decide on error flags AS_COMPILER_FLAG(-Wall, GST_WALL="yes", GST_WALL="no") @@ -96,6 +97,20 @@ dnl make _CFLAGS and _LIBS available AC_SUBST(GST_BASE_CFLAGS) AC_SUBST(GST_BASE_LIBS) +dnl If we need them, we can also use the base class libraries +PKG_CHECK_MODULES(GST_CONTROLLER, gstreamer-controller-$GST_MAJORMINOR >= $GST_REQUIRED, + HAVE_GST_BASE=yes, HAVE_GST_BASE=no) + +dnl Give a warning if we don't have gstreamer libs +dnl you can turn this into an error if you need them +if test "x$HAVE_GST_BASE" = "xno"; then + AC_MSG_NOTICE(no GStreamer controllerlibraries found (gstreamer-controller-$GST_MAJORMINOR)) +fi + +dnl make _CFLAGS and _LIBS available +AC_SUBST(GST_CONTROLLER_CFLAGS) +AC_SUBST(GST_CONTROLLER_LIBS) + dnl Check gst-plugins-bad PKG_CHECK_MODULES(GST_BAD, gstreamer-plugins-bad-$GST_MAJORMINOR, HAVE_GST_BAD=yes, HAVE_GST_BAD=no) @@ -139,7 +154,18 @@ AM_CONDITIONAL(HAVE_GST_CHECK, test "x$HAVE_GST_CHECK" = "xyes") AC_SUBST(GST_CHECK_CFLAGS) AC_SUBST(GST_CHECK_LIBS) - +dnl check gst check 3a support +dnl LIBMFLDADVCI_CFLAGS +dnl LIBMFLDADVCI_LIBS +HAVE_LIBMFLDADVCI=yes +PKG_CHECK_MODULES(LIBMFLDADVCI, libmfldadvci >= $LIBMFLD_REQUIRED, HAVE_LIBMFLDADVCI=yes, HAVE_LIBMFLDADVCI=no) 
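The libmfldadvci check above is deliberately soft: a missing 3A library only produces a notice, not a configure error. That matches the runtime side of this patch, where gstv4l2mfldadvci.c defines wrapper_default_link_functions_init() to fill the GstV4l2MFLDAdvCI function table with no-op defaults and lib_3a_link_functions_init() to resolve the real entry points from a GModule. The sketch below is editorial, not part of the patch, and shows one way such a loader could look; the module name "libmfldadvci.so" and the setup_advci() caller are assumptions for illustration.

    #include <gmodule.h>
    #include "gstv4l2mfldadvci.h"

    /* Hypothetical caller: install the no-op defaults first so every 3A
     * entry point is valid, then let the optional 3A library (if it can be
     * loaded) override them symbol by symbol. */
    static void
    setup_advci (GstV4l2MFLDAdvCI *advci)
    {
      GModule *mod;

      wrapper_default_link_functions_init (advci);

      /* "libmfldadvci.so" is an assumed module name for illustration. */
      mod = g_module_open ("libmfldadvci.so", G_MODULE_BIND_LAZY);
      if (mod != NULL)
        lib_3a_link_functions_init (advci, mod);
    }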
+AM_CONDITIONAL(HAVE_LIBMFLDADVCI, test ! "x$LIBMFLDADVCI" = "xyes") +if test "x$HAVE_LIBMFLDADVCI" = "xyes"; then +LIBMFLDADVCI_CFLAGS="" +LIBMFLDADVCI_LIBS="" +AC_SUBST(LIBMFLDADVCI_CFLAGS) +AC_SUBST(LIBMFLDADVCI_LIBS) +fi AC_PATH_PROG(VALGRIND_PATH, valgrind, no) AM_CONDITIONAL(HAVE_VALGRIND, test ! "x$VALGRIND_PATH" = "xno") @@ -153,12 +179,14 @@ AC_SUBST(GENERAL_CFLAGS) AC_OUTPUT(Makefile \ m4/Makefile \ gst-libs/Makefile \ + gst-libs/atomisphal/Makefile \ gst-libs/gst/Makefile \ gst-libs/gst/camera/Makefile \ gst/Makefile \ - gst/v4l2newcam/Makefile \ + gst/mfldv4l2cam/Makefile \ docs/Makefile \ docs/reference/Makefile \ - docs/reference/gst-nokia-videosrc/Makefile + docs/reference/gst-mfld-videosrc/Makefile pkgconfig/Makefile - pkgconfig/gstreamer-nokia-videosrc.pc) + pkgconfig/gstreamer-atomisphal.pc + pkgconfig/gstreamer-mfld-videosrc.pc) diff --git a/docs/Makefile.am b/docs/Makefile.am index a3a453a..6a62f8e 100644 --- a/docs/Makefile.am +++ b/docs/Makefile.am @@ -1,7 +1,3 @@ -if ENABLE_GTK_DOC -GTK_DOC_DIRS = reference -else GTK_DOC_DIRS = -endif SUBDIRS = $(GTK_DOC_DIRS) diff --git a/docs/reference/Makefile.am b/docs/reference/Makefile.am index 4037c95..f7228cc 100644 --- a/docs/reference/Makefile.am +++ b/docs/reference/Makefile.am @@ -1 +1 @@ -SUBDIRS = gst-nokia-videosrc +SUBDIRS = gst-mfld-videosrc diff --git a/docs/reference/gst-mfld-videosrc/Makefile.am b/docs/reference/gst-mfld-videosrc/Makefile.am new file mode 100644 index 0000000..8b7ab62 --- /dev/null +++ b/docs/reference/gst-mfld-videosrc/Makefile.am @@ -0,0 +1,94 @@ +## Process this file with automake to produce Makefile.in + +# We require automake 1.6 at least. +AUTOMAKE_OPTIONS = 1.6 + +SUBDIRS=. + +# The name of the module, e.g. 'glib'. +DOC_MODULE=gst-mfld-videosrc + +# The top-level SGML file. You can change this if you want to. +DOC_MAIN_SGML_FILE=$(DOC_MODULE)-docs.sgml + +# The directory containing the source code. Relative to $(srcdir). +# gtk-doc will search all .c & .h files beneath here for inline comments +# documenting the functions and macros. +# e.g. DOC_SOURCE_DIR=../../../gtk +DOC_SOURCE_DIR = $(top_srcdir) + +# Extra options to pass to gtkdoc-scangobj. Not normally needed. +SCANGOBJ_OPTIONS=--type-init-func="g_type_init();gst_init(&argc,&argv)" + +# Extra options to supply to gtkdoc-scan. +# e.g. SCAN_OPTIONS=--deprecated-guards="GTK_DISABLE_DEPRECATED" +SCAN_OPTIONS= + +# Extra options to supply to gtkdoc-mkdb. +# e.g. MKDB_OPTIONS=--sgml-mode --output-format=xml +MKDB_OPTIONS=--sgml-mode --output-format=xml + +# Extra options to supply to gtkdoc-mktmpl +# e.g. MKTMPL_OPTIONS=--only-section-tmpl +MKTMPL_OPTIONS= + +# Extra options to supply to gtkdoc-fixref. Not normally needed. +# e.g. FIXXREF_OPTIONS=--extra-dir=../gdk-pixbuf/html --extra-dir=../gdk/html +FIXXREF_OPTIONS= + +# Used for dependencies. The docs will be rebuilt if any of these change. +# e.g. HFILE_GLOB=$(top_srcdir)/gtk/*.h +# e.g. CFILE_GLOB=$(top_srcdir)/gtk/*.c +#HFILE_GLOB=$(top_srcdir)/gst-libs/gst/camera/*.h +#CFILE_GLOB=$(top_srcdir)/gst-libs/gst/camera/*.c + +HFILE_GLOB=$(DOC_SOURCE_DIR)/*/*/*.h +CFILE_GLOB=$(DOC_SOURCE_DIR)/*/*/*.c + +SCANOBJ_DEPS = \ + $(top_builddir)/gst-libs/gst/camera/libgstmfldcamera-@GST_MAJORMINOR@.la + +# Header files to ignore when scanning. +# e.g. IGNORE_HFILES=gtkdebug.h gtkintl.h +IGNORE_HFILES=config.h + +# Images to copy into HTML directory. +# e.g. HTML_IMAGES=$(top_srcdir)/gtk/stock-icons/stock_about_24.png +HTML_IMAGES= + +# Extra SGML files that are included by $(DOC_MAIN_SGML_FILE). 
+# e.g. content_files=running.sgml building.sgml changes-2.0.sgml +content_files= + +# SGML files where gtk-doc abbrevations (#GtkWidget) are expanded +# These files must be listed here *and* in content_files +# e.g. expand_content_files=running.sgml +expand_content_files= + +# CFLAGS and LDFLAGS for compiling gtkdoc-scangobj with your library. +# Only needed if you are using gtkdoc-scangobj to dynamically query widget +# signals and properties. +# e.g. INCLUDES=-I$(top_srcdir) -I$(top_builddir) $(GTK_DEBUG_FLAGS) +# e.g. GTKDOC_LIBS=$(top_builddir)/gtk/$(gtktargetlib) +GTKDOC_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \ + -I$(top_srcdir)/gst-libs/gst/camera + +GTKDOC_LIBS=$(GST_LIBS) $(GST_BASE_LIBS) $(SCANOBJ_DEPS) \ + -lgstinterfaces-$(GST_MAJORMINOR) \ + -lgstphotography-$(GST_MAJORMINOR) + +# This includes the standard gtk-doc make rules, copied by gtkdocize. +include $(top_srcdir)/gtk-doc.make + +# Other files to distribute +# e.g. EXTRA_DIST += version.xml.in +EXTRA_DIST += + +# Files not to distribute +# for --rebuild-types in $(SCAN_OPTIONS), e.g. $(DOC_MODULE).types +# for --rebuild-sections in $(SCAN_OPTIONS) e.g. $(DOC_MODULE)-sections.txt +#DISTCLEANFILES += + +# Comment this out if you want your docs-status tested during 'make check' +#TESTS = $(GTKDOC_CHECK) + diff --git a/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-docs.sgml b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-docs.sgml new file mode 100644 index 0000000..49cd50d --- /dev/null +++ b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-docs.sgml @@ -0,0 +1,46 @@ + + + + + gstreamer0.10-mfld-videosrc Reference Manual + + for gstreamer-mfld-videosrc 0.10 + + + + + + GStreamer Camera Source base class public API + + + + + + + SubdevSrc element + + + + + + + + + + New V4L2camsrc element + + + + + + Object Hierarchy + + + + + API Index + + diff --git a/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-sections.txt b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-sections.txt new file mode 100644 index 0000000..943959c --- /dev/null +++ b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc-sections.txt @@ -0,0 +1,101 @@ +
+<SECTION>
+<FILE>gstcamerasrc</FILE>
+<TITLE>GstCameraSrc base class</TITLE>
+<INCLUDE>gst-libs/gst/camera/gstcamerasrc.h</INCLUDE>
+GstCameraSrc
+GstCameraSrcClass
+GstCameraSrcCaptureMode
+GstCameraSrcViewfinderMode
+GstCameraSrcAFReqMode
+GstCameraCapturePhase
+gst_camerasrc_add_color_channel
+gst_camerasrc_clear_color_channels
+gst_camerasrc_send_capture_start_message
+gst_camerasrc_send_capture_stop_message
+gst_camerasrc_get_caps_from_info
+GST_CAMERA_SRC_MAX_SIZE
+<SUBSECTION Standard>
+GST_TYPE_CAMERA_SRC
+GST_CAMERA_SRC
+GST_CAMERA_SRC_CLASS
+GST_CAMERA_SRC_GET_CLASS
+GST_IS_CAMERA_SRC
+GST_IS_CAMERA_SRC_CLASS
+GST_CAMERA_SRC_CAST
+GST_CAMERA_SRC_CLASS_CAST
+GST_TYPE_CAMERA_SRC_CAPTURE_MODE
+GST_TYPE_CAMERA_SRC_VIEWFINDER_MODE
+gst_camerasrc_get_type
+</SECTION>
+
+<SECTION>
+<FILE>gstcameracolorbalance</FILE>
+<TITLE>GstColorBalanceChannel interface implementation</TITLE>
+GstCameraSrcColorBalanceChannel
+GstCameraSrcColorBalanceChannelClass
+gst_camerasrc_color_balance_list_channels
+gst_camerasrc_color_balance_set_value
+gst_camerasrc_color_balance_get_value
+GST_IMPLEMENT_CAMERA_SRC_COLOR_BALANCE_METHODS
+<SUBSECTION Standard>
+GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL
+GST_IS_CAMERA_SRC_COLOR_BALANCE_CHANNEL
+GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL
+gst_camerasrc_color_balance_channel_get_type
+GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL_CLASS
+GST_IS_CAMERA_SRC_COLOR_BALANCE_CHANNEL_CLASS
+</SECTION>
+
+<SECTION>
+<FILE>gstcameraphotoiface</FILE>
+<TITLE>GstPhotography interface implementation</TITLE>
+gst_camerasrc_photo_set_ev_compensation
+gst_camerasrc_photo_get_ev_compensation
+gst_camerasrc_photo_set_iso_speed
+gst_camerasrc_photo_get_iso_speed
+gst_camerasrc_photo_set_aperture
+gst_camerasrc_photo_get_aperture
+gst_camerasrc_photo_set_exposure
+gst_camerasrc_photo_get_exposure
+gst_camerasrc_photo_set_wb_mode
+gst_camerasrc_photo_get_wb_mode
+gst_camerasrc_photo_set_tone_mode
+gst_camerasrc_photo_get_tone_mode
+gst_camerasrc_photo_set_scene_mode
+gst_camerasrc_photo_get_scene_mode
+gst_camerasrc_photo_set_flash_mode
+gst_camerasrc_photo_get_flash_mode
+gst_camerasrc_photo_set_zoom
+gst_camerasrc_photo_get_zoom
+gst_camerasrc_photo_set_flicker_mode
+gst_camerasrc_photo_get_flicker_mode
+gst_camerasrc_photo_set_focus_mode
+gst_camerasrc_photo_get_focus_mode
+gst_camerasrc_photo_set_autofocus
+gst_camerasrc_photo_get_capabilities
+gst_camerasrc_photo_prepare_for_capture
+gst_camerasrc_photo_ready_for_capture
+gst_camerasrc_photo_set_config
+gst_camerasrc_photo_get_config
+gst_camerasrc_photo_set_format
+gst_camerasrc_photo_get_format
+gst_camerasrc_photo_set_property
+gst_camerasrc_photo_get_property
+GST_CAMERA_SRC_PHOTO_FUNCS
+GST_IMPLEMENT_CAMERA_SRC_PHOTO_METHODS
+</SECTION>
+
+<SECTION>
+<FILE>element-mfldv4l2camsrc</FILE>
+<TITLE>MFLD v4l2camsrc element</TITLE>
+GstMFLDV4l2CamSrc
+GstMFLDV4l2CamSrcClass
+</SECTION>
+
+<SECTION>
+<FILE>v4l2camsrc_calls</FILE>
+<TITLE>MFLDv4l2NewCamSrc V4L2 API calls</TITLE>
+GstV4l2Buffer
+GstMFLDV4l2CamSrcBufferPool
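The sections above expose the GstPhotography and GstColorBalance implementations of the camera source through gtk-doc. As a rough, editorial illustration of how an application reaches that API (not part of this patch), the sketch below creates the element and sets EV compensation through the generic photography interface; the factory name "mfldv4l2camsrc" and the value 0.5 are assumptions.

    #include <gst/gst.h>
    #include <gst/interfaces/photography.h>

    int
    main (int argc, char *argv[])
    {
      GstElement *src;

      gst_init (&argc, &argv);

      /* "mfldv4l2camsrc" is an assumed factory name for illustration. */
      src = gst_element_factory_make ("mfldv4l2camsrc", "camsrc");
      if (src == NULL)
        return 1;

      /* The element implements GstPhotography, so this call is routed to
       * the gst_camerasrc_photo_* functions listed in the section above. */
      if (GST_IS_PHOTOGRAPHY (src))
        gst_photography_set_ev_compensation (GST_PHOTOGRAPHY (src), 0.5);

      gst_object_unref (src);
      return 0;
    }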
diff --git a/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc.types b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc.types new file mode 100644 index 0000000..6f45783 --- /dev/null +++ b/docs/reference/gst-mfld-videosrc/gst-mfld-videosrc.types @@ -0,0 +1,6 @@ +#include +#include "gstmfldcamerasrc.h" +#include "gstmfldcameracolorbalance.h" + +gst_camerasrc_get_type +gst_camerasrc_color_balance_channel_get_type diff --git a/gst-libs/Makefile.am b/gst-libs/Makefile.am index 062cb55..00d04e4 100644 --- a/gst-libs/Makefile.am +++ b/gst-libs/Makefile.am @@ -1 +1 @@ -SUBDIRS = gst +SUBDIRS = atomisphal gst diff --git a/gst-libs/atomisphal/Makefile.am b/gst-libs/atomisphal/Makefile.am new file mode 100644 index 0000000..0369816 --- /dev/null +++ b/gst-libs/atomisphal/Makefile.am @@ -0,0 +1,25 @@ +lib_LTLIBRARIES = libgstatomisphal-@GST_MAJORMINOR@.la + +libgstatomisphal_@GST_MAJORMINOR@includedir = $(includedir)/ + +libgstatomisphal_@GST_MAJORMINOR@_la_SOURCES = mfld_cam.c \ + mfld_driver.c \ + gstv4l2mfldadvci.c + +libgstatomisphal_@GST_MAJORMINOR@include_HEADERS = mfld_cam.h \ + mfld_cam_defs.h \ + gstv4l2mfldadvci.h \ + atomisp_v4l2.h \ + atomisp.h \ + sh_css_types.h + +libgstatomisphal_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS)\ + -DSTDC99 -D__user= \ + $(LIBMFLDADVCI_CFLAGS) + +libgstatomisphal_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) + +libgstatomisphal_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LDFLAGS)\ + $(LIBMFLDADVCI_LIBS) + +noinst_HEADERS = mfld_driver.h diff --git a/gst-libs/atomisphal/atomisp.h b/gst-libs/atomisphal/atomisp.h new file mode 100644 index 0000000..6b60516 --- /dev/null +++ b/gst-libs/atomisphal/atomisp.h @@ -0,0 +1,751 @@ +/* + * Support for Medifield PNW Camera Imaging ISP subsystem. + * + * Copyright (c) 2010 Intel Corporation. All Rights Reserved. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License version + * 2 as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA + * 02110-1301, USA. 
+ * + */ + +#ifndef _ATOM_ISP_H +#define _ATOM_ISP_H + +#include +#include /* for size_t */ + +/*ISP binary running mode*/ +#define CI_MODE_PREVIEW 0x8000 +#define CI_MODE_VIDEO 0x4000 +#define CI_MODE_STILL_CAPTURE 0x2000 +#define CI_MODE_CONTINUOUS 0x1000 +#define CI_MODE_NONE 0x0000 + +#define OUTPUT_MODE_FILE 0x0100 +#define OUTPUT_MODE_TEXT 0x0200 + +/* Configuration used by Bayer noise reduction and YCC noise reduction */ +struct atomisp_nr_config { + /* [gain] Strength of noise reduction for Bayer NR (Used by Bayer NR) */ + unsigned int bnr_gain; + /* [gain] Strength of noise reduction for YCC NR (Used by YCC NR) */ + unsigned int ynr_gain; + /* [intensity] Sensitivity of Edge (Used by Bayer NR) */ + unsigned int direction; + /* [intensity] coring threshold for Cb (Used by YCC NR) */ + unsigned int threshold_cb; + /* [intensity] coring threshold for Cr (Used by YCC NR) */ + unsigned int threshold_cr; +}; + +/* Temporal noise reduction configuration */ +struct atomisp_tnr_config { + unsigned int gain; /* [gain] Strength of NR */ + unsigned int threshold_y;/* [intensity] Motion sensitivity for Y */ + unsigned int threshold_uv;/* [intensity] Motion sensitivity for U/V */ +}; + +/* Histogram. This contains num_elements values of type unsigned int. + * The data pointer is a DDR pointer (virtual address). + */ +struct atomisp_histogram { + unsigned int num_elements; + void __user *data; +}; + +enum atomisp_ob_mode { + atomisp_ob_mode_none, + atomisp_ob_mode_fixed, + atomisp_ob_mode_raster +}; + +/* Optical black level configuration */ +struct atomisp_ob_config { + /* Obtical black level mode (Fixed / Raster) */ + enum atomisp_ob_mode mode; + /* [intensity] optical black level for GR (relevant for fixed mode) */ + unsigned int level_gr; + /* [intensity] optical black level for R (relevant for fixed mode) */ + unsigned int level_r; + /* [intensity] optical black level for B (relevant for fixed mode) */ + unsigned int level_b; + /* [intensity] optical black level for GB (relevant for fixed mode) */ + unsigned int level_gb; + /* [BQ] 0..63 start position of OB area (relevant for raster mode) */ + unsigned short start_position; + /* [BQ] start..63 end position of OB area (relevant for raster mode) */ + unsigned short end_position; +}; + +/* Edge enhancement (sharpen) configuration */ +struct atomisp_ee_config { + /* [gain] The strength of sharpness. u5_11 */ + unsigned int gain; + /* [intensity] The threshold that divides noises from edge. u8_8 */ + unsigned int threshold; + /* [gain] The strength of sharpness in pell-mell area. 
u5_11 */ + unsigned int detail_gain; +}; + +struct atomisp_3a_output { + int ae_y; + int awb_cnt; + int awb_gr; + int awb_r; + int awb_b; + int awb_gb; + int af_hpf1; + int af_hpf2; +}; + +enum atomisp_calibration_type { + calibration_type1, + calibration_type2, + calibration_type3 +}; + +struct atomisp_calibration_group { + unsigned int size; + unsigned int type; + unsigned short *calb_grp_values; +}; + +struct atomisp_gc_config { + __u16 gain_k1; + __u16 gain_k2; +}; + +struct atomisp_3a_config { + unsigned int ae_y_coef_r; /* [gain] Weight of R for Y */ + unsigned int ae_y_coef_g; /* [gain] Weight of G for Y */ + unsigned int ae_y_coef_b; /* [gain] Weight of B for Y */ + unsigned int awb_lg_high_raw; /* [intensity] + AWB level gate high for raw */ + unsigned int awb_lg_low; /* [intensity] AWB level gate low */ + unsigned int awb_lg_high; /* [intensity] AWB level gate high */ + int af_fir1_coef[7]; /* [factor] AF FIR coefficients of fir1 */ + int af_fir2_coef[7]; /* [factor] AF FIR coefficients of fir2 */ +}; + +/* structure that describes the 3A and DIS grids shared with 3A lib*/ +struct atomisp_grid_info { + /* ISP input size that is visible for user */ + unsigned int isp_in_width; + unsigned int isp_in_height; + /* 3A statistics grid: */ + unsigned int s3a_width; + unsigned int s3a_height; + unsigned int s3a_bqs_per_grid_cell; + /* DIS grid: */ + unsigned int dis_width; /* also used for vertical projections */ + unsigned int dis_aligned_width; + unsigned int dis_height; /* also used for horizontal projections */ + unsigned int dis_aligned_height; + unsigned int dis_bqs_per_grid_cell; + unsigned int dis_hor_coef_num; + unsigned int dis_ver_coef_num; +}; + +struct atomisp_dis_vector { + int x; + int y; +}; + +struct atomisp_dis_coefficients { + struct atomisp_grid_info grid_info; + short __user *vertical_coefficients; + short __user *horizontal_coefficients; +}; + +struct atomisp_dis_statistics { + struct atomisp_grid_info grid_info; + int __user *vertical_projections; + int __user *horizontal_projections; +}; + +struct atomisp_3a_statistics { + struct atomisp_grid_info grid_info; + struct atomisp_3a_output __user *data; +}; + +/* White Balance (Gain Adjust) */ +struct atomisp_wb_config { + unsigned int integer_bits; + unsigned int gr; /* unsigned .<16-integer_bits> */ + unsigned int r; /* unsigned .<16-integer_bits> */ + unsigned int b; /* unsigned .<16-integer_bits> */ + unsigned int gb; /* unsigned .<16-integer_bits> */ +}; + +/* Color Space Conversion settings */ +struct atomisp_cc_config { + unsigned int fraction_bits; + int matrix[3 * 3]; /* RGB2YUV Color matrix, signed + <13-fraction_bits>. */ +}; + +/* De pixel noise configuration */ +struct atomisp_de_config { + unsigned int pixelnoise; + unsigned int c1_coring_threshold; + unsigned int c2_coring_threshold; +}; + +/* Chroma enhancement */ +struct atomisp_ce_config { + unsigned int uv_level_min; + unsigned int uv_level_max; +}; + +/* Defect pixel correction configuration */ +struct atomisp_dp_config { + /* [intensity] The threshold of defect Pixel Correction, representing + * the permissible difference of intensity between one pixel and its + * surrounding pixels. Smaller values result in more frequent pixel + * corrections. u0_16 + */ + unsigned int threshold; + /* [gain] The sensitivity of mis-correction. ISP will miss a lot of + * defects if the value is set too large. 
u8_8 + */ + unsigned int gain; +}; + +struct atomisp_parm { + struct atomisp_grid_info info; + struct atomisp_wb_config wb_config; + struct atomisp_cc_config cc_config; + struct atomisp_ob_config ob_config; + struct atomisp_de_config de_config; + struct atomisp_ce_config ce_config; + struct atomisp_dp_config dp_config; + struct atomisp_nr_config nr_config; + struct atomisp_ee_config ee_config; + struct atomisp_tnr_config tnr_config; +}; + +#define ATOMISP_GAMMA_TABLE_SIZE 1024 +struct atomisp_gamma_table { + unsigned short data[ATOMISP_GAMMA_TABLE_SIZE]; +}; + +/* Morphing table for advanced ISP. + * Each line of width elements takes up COORD_TABLE_EXT_WIDTH elements + * in memory. + */ +#define ATOMISP_MORPH_TABLE_NUM_PLANES 6 +struct atomisp_morph_table { + unsigned int height; + unsigned int width; /* number of valid elements per line */ + unsigned short __user *coordinates_x[ATOMISP_MORPH_TABLE_NUM_PLANES]; + unsigned short __user *coordinates_y[ATOMISP_MORPH_TABLE_NUM_PLANES]; +}; + +#define ATOMISP_NUM_SC_COLORS 4 +#define ATOMISP_SC_FLAG_QUERY (1 << 0) + +struct atomisp_shading_table { + /* + * If flag ATOMISP_SC_FLAG_QUERY is set, IOCTL will only query current + * LSC status and return, otherwise it will set LSC according to + * userspace's input. + */ + __u8 flags; + /* + * If ATOMISP_SC_FLAG_QUERY is set, enable is output parameter, + * otherwise it is an input parameter and will enable/disable LSC + * engine + */ + __u8 enable; + /* native sensor resolution */ + __u32 sensor_width; + __u32 sensor_height; + /* number of data points per line per color (bayer quads) */ + __u32 width; + /* number of lines of data points per color (bayer quads) */ + __u32 height; + /* bits of fraction part for shading table values */ + __u32 fraction_bits; + /* one table for each color (use sh_css_sc_color to index) */ + __u16 __user *data[ATOMISP_NUM_SC_COLORS]; +}; + +struct atomisp_makernote_info { + /* bits 31-16: numerator, bits 15-0: denominator */ + unsigned int focal_length; + /* bits 31-16: numerator, bits 15-0: denominator*/ + unsigned int f_number_curr; + /* + * bits 31-24: max f-number numerator + * bits 23-16: max f-number denominator + * bits 15-8: min f-number numerator + * bits 7-0: min f-number denominator + */ + unsigned int f_number_range; +}; + +/* parameter for MACC */ +#define ATOMISP_NUM_MACC_AXES 16 +struct atomisp_macc_table { + short data[4 * ATOMISP_NUM_MACC_AXES]; +}; + +struct atomisp_macc_config { + int color_effect; + struct atomisp_macc_table table; +}; + +/* Parameter for ctc parameter control */ +#define ATOMISP_CTC_TABLE_SIZE 1024 +struct atomisp_ctc_table { + unsigned short data[ATOMISP_CTC_TABLE_SIZE]; +}; + +/* Parameter for overlay image loading */ +struct atomisp_overlay { + /* the frame containing the overlay data The overlay frame width should + * be the multiples of 2*ISP_VEC_NELEMS. The overlay frame height + * should be the multiples of 2. 
+ */ + struct v4l2_framebuffer *frame; + /* Y value of overlay background */ + unsigned char bg_y; + /* U value of overlay background */ + char bg_u; + /* V value of overlay background */ + char bg_v; + /* the blending percent of input data for Y subpixels */ + unsigned char blend_input_perc_y; + /* the blending percent of input data for U subpixels */ + unsigned char blend_input_perc_u; + /* the blending percent of input data for V subpixels */ + unsigned char blend_input_perc_v; + /* the blending percent of overlay data for Y subpixels */ + unsigned char blend_overlay_perc_y; + /* the blending percent of overlay data for U subpixels */ + unsigned char blend_overlay_perc_u; + /* the blending percent of overlay data for V subpixels */ + unsigned char blend_overlay_perc_v; + /* the overlay start x pixel position on output frame It should be the + multiples of 2*ISP_VEC_NELEMS. */ + unsigned int overlay_start_x; + /* the overlay start y pixel position on output frame It should be the + multiples of 2. */ + unsigned int overlay_start_y; +}; + +/* Sensor resolution specific data for AE calculation. + * This contains sensor specific data, so we simply use an array of 64 + * bytes. */ +struct atomisp_sensor_mode_data { + unsigned char data[64]; +}; + +struct atomisp_exposure { + unsigned int integration_time[8]; + unsigned int shutter_speed[8]; + unsigned int gain[4]; + unsigned int aperture; +}; + +/* For texture streaming. */ +struct atomisp_bc_video_package { + int ioctl_cmd; + int device_id; + int inputparam; + int outputparam; +}; + +enum atomisp_focus_hp { + ATOMISP_FOCUS_HP_IN_PROGRESS = (1U << 2), + ATOMISP_FOCUS_HP_COMPLETE = (2U << 2), + ATOMISP_FOCUS_HP_FAILED = (3U << 2) +}; + +/* Masks */ +#define ATOMISP_FOCUS_STATUS_MOVING (1U << 0) +#define ATOMISP_FOCUS_STATUS_ACCEPTS_NEW_MOVE (1U << 1) +#define ATOMISP_FOCUS_STATUS_HOME_POSITION (3U << 2) + +enum atomisp_camera_port { + ATOMISP_CAMERA_PORT_SECONDARY, + ATOMISP_CAMERA_PORT_PRIMARY, +}; + +/* Flash modes. Default is off. + * Setting a flash to TORCH or INDICATOR mode will automatically + * turn it on. Setting it to FLASH mode will not turn on the flash + * until the FLASH_STROBE command is sent. */ +enum atomisp_flash_mode { + ATOMISP_FLASH_MODE_OFF, + ATOMISP_FLASH_MODE_FLASH, + ATOMISP_FLASH_MODE_TORCH, + ATOMISP_FLASH_MODE_INDICATOR, +}; + +/* Flash statuses, used by atomisp driver to check before starting + * flash and after having started flash. */ +enum atomisp_flash_status { + ATOMISP_FLASH_STATUS_OK, + ATOMISP_FLASH_STATUS_HW_ERROR, + ATOMISP_FLASH_STATUS_INTERRUPTED, + ATOMISP_FLASH_STATUS_TIMEOUT, +}; + +/* Frame status. This is used to detect corrupted frames and flash + * exposed frames. Usually, the first 2 frames coming out of the sensor + * are corrupted. When using flash, the frame before and the frame after + * the flash exposed frame may be partially exposed by flash. The ISP + * statistics for these frames should not be used by the 3A library. + * The frame status value can be found in the "reserved" field in the + * v4l2_buffer struct. 
*/ +enum atomisp_frame_status { + ATOMISP_FRAME_STATUS_OK, + ATOMISP_FRAME_STATUS_CORRUPTED, + ATOMISP_FRAME_STATUS_FLASH_EXPOSED, + ATOMISP_FRAME_STATUS_FLASH_PARTIAL, + ATOMISP_FRAME_STATUS_FLASH_FAILED, +}; + +enum atomisp_acc_type { + ATOMISP_ACC_STANDALONE, /* Stand-alone acceleration */ + ATOMISP_ACC_OUTPUT, /* Accelerator stage on output frame */ + ATOMISP_ACC_VIEWFINDER /* Accelerator stage on viewfinder frame */ +}; + +enum atomisp_acc_arg_type { + ATOMISP_ACC_ARG_SCALAR_IN, /* Scalar input argument */ + ATOMISP_ACC_ARG_SCALAR_OUT, /* Scalar output argument */ + ATOMISP_ACC_ARG_SCALAR_IO, /* Scalar in/output argument */ + ATOMISP_ACC_ARG_PTR_IN, /* Pointer input argument */ + ATOMISP_ACC_ARG_PTR_OUT, /* Pointer output argument */ + ATOMISP_ACC_ARG_PTR_IO, /* Pointer in/output argument */ + ATOMISP_ARG_PTR_NOFLUSH, /* Pointer argument will not be flushed */ + ATOMISP_ARG_PTR_STABLE, /* Pointer input argument that is stable */ + ATOMISP_ACC_ARG_FRAME /* Frame argument */ +}; + +enum { + ATOMISP_IOC_SIGNAL_SOF, /* Start Of Frame */ + ATOMISP_IOC_SIGNAL_EOF, /* End Of Frame */ +}; + +struct atomisp_sp_arg { + enum atomisp_acc_arg_type type; /* Type of SP argument */ + void *value; /* Value of SP argument */ + unsigned int size; /* Size of SP argument */ +}; + +/* Acceleration API */ +struct atomisp_acc_fw_arg { + unsigned int fw_handle; + unsigned int index; + void __user *value; + size_t size; +}; + +struct atomisp_acc_fw_abort { + unsigned int fw_handle; + /* Timeout in us */ + unsigned int timeout; +}; + +struct atomisp_acc_fw_load { + unsigned int size; + unsigned int fw_handle; + void __user *data; +}; + +/* + * V4L2 private internal data interface. + * ----------------------------------------------------------------------------- + * struct v4l2_private_int_data - request private data stored in video device + * internal memory. + * @size: sanity check to ensure userspace's buffer fits whole private data. + * If not, kernel will make partial copy (or nothing if @size == 0). + * @size is always corrected for the minimum necessary if IOCTL returns + * no error. + * @data: pointer to userspace buffer. + */ +struct v4l2_private_int_data { + __u32 size; + void __user *data; + __u32 reserved[2]; +}; + +/*Private IOCTLs for ISP */ +#define ATOMISP_IOC_G_XNR \ + _IOR('v', BASE_VIDIOC_PRIVATE + 0, int) +#define ATOMISP_IOC_S_XNR \ + _IOW('v', BASE_VIDIOC_PRIVATE + 1, int) +#define ATOMISP_IOC_G_NR \ + _IOR('v', BASE_VIDIOC_PRIVATE + 2, struct atomisp_nr_config) +#define ATOMISP_IOC_S_NR \ + _IOW('v', BASE_VIDIOC_PRIVATE + 3, struct atomisp_nr_config) +#define ATOMISP_IOC_G_TNR \ + _IOR('v', BASE_VIDIOC_PRIVATE + 4, struct atomisp_tnr_config) +#define ATOMISP_IOC_S_TNR \ + _IOW('v', BASE_VIDIOC_PRIVATE + 5, struct atomisp_tnr_config) +#define ATOMISP_IOC_G_HISTOGRAM \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 6, struct atomisp_histogram) +#define ATOMISP_IOC_S_HISTOGRAM \ + _IOW('v', BASE_VIDIOC_PRIVATE + 7, struct atomisp_histogram) +#define ATOMISP_IOC_G_BLACK_LEVEL_COMP \ + _IOR('v', BASE_VIDIOC_PRIVATE + 8, struct atomisp_ob_config) +#define ATOMISP_IOC_S_BLACK_LEVEL_COMP \ + _IOW('v', BASE_VIDIOC_PRIVATE + 9, struct atomisp_ob_config) +#define ATOMISP_IOC_G_EE \ + _IOR('v', BASE_VIDIOC_PRIVATE + 12, struct atomisp_ee_config) +#define ATOMISP_IOC_S_EE \ + _IOW('v', BASE_VIDIOC_PRIVATE + 13, struct atomisp_ee_config) +/* Digital Image Stabilization: + * 1. get dis statistics: reads DIS statistics from ISP (every frame) + * 2. set dis coefficients: set DIS filter coefficients (one time) + * 3. 
set dis motion vecotr: set motion vector (result of DIS, every frame) + */ +#define ATOMISP_IOC_G_DIS_STAT \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 14, struct atomisp_dis_statistics) +#define ATOMISP_IOC_S_DIS_COEFS \ + _IOW('v', BASE_VIDIOC_PRIVATE + 15, struct atomisp_dis_coefficients) +#define ATOMISP_IOC_S_DIS_VECTOR \ + _IOW('v', BASE_VIDIOC_PRIVATE + 16, struct atomisp_dis_vector) + +#define ATOMISP_IOC_G_3A_STAT \ + _IOW('v', BASE_VIDIOC_PRIVATE + 17, struct atomisp_3a_statistics) +#define ATOMISP_IOC_G_ISP_PARM \ + _IOR('v', BASE_VIDIOC_PRIVATE + 18, struct atomisp_parm) +#define ATOMISP_IOC_S_ISP_PARM \ + _IOW('v', BASE_VIDIOC_PRIVATE + 19, struct atomisp_parm) +#define ATOMISP_IOC_G_ISP_GAMMA \ + _IOR('v', BASE_VIDIOC_PRIVATE + 20, struct atomisp_gamma_table) +#define ATOMISP_IOC_S_ISP_GAMMA \ + _IOW('v', BASE_VIDIOC_PRIVATE + 21, struct atomisp_gamma_table) +#define ATOMISP_IOC_G_ISP_GDC_TAB \ + _IOR('v', BASE_VIDIOC_PRIVATE + 22, struct atomisp_morph_table) +#define ATOMISP_IOC_S_ISP_GDC_TAB \ + _IOW('v', BASE_VIDIOC_PRIVATE + 23, struct atomisp_morph_table) +#define ATOMISP_IOC_ISP_MAKERNOTE \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 24, struct atomisp_makernote_info) + +/* macc parameter control*/ +#define ATOMISP_IOC_G_ISP_MACC \ + _IOR('v', BASE_VIDIOC_PRIVATE + 25, struct atomisp_macc_config) +#define ATOMISP_IOC_S_ISP_MACC \ + _IOW('v', BASE_VIDIOC_PRIVATE + 26, struct atomisp_macc_config) + +/* Defect pixel detection & Correction */ +#define ATOMISP_IOC_G_ISP_BAD_PIXEL_DETECTION \ + _IOR('v', BASE_VIDIOC_PRIVATE + 27, struct atomisp_dp_config) +#define ATOMISP_IOC_S_ISP_BAD_PIXEL_DETECTION \ + _IOW('v', BASE_VIDIOC_PRIVATE + 28, struct atomisp_dp_config) + +/* False Color Correction */ +#define ATOMISP_IOC_G_ISP_FALSE_COLOR_CORRECTION \ + _IOR('v', BASE_VIDIOC_PRIVATE + 29, struct atomisp_de_config) +#define ATOMISP_IOC_S_ISP_FALSE_COLOR_CORRECTION \ + _IOW('v', BASE_VIDIOC_PRIVATE + 30, struct atomisp_de_config) + +/* ctc parameter control */ +#define ATOMISP_IOC_G_ISP_CTC \ + _IOR('v', BASE_VIDIOC_PRIVATE + 31, struct atomisp_ctc_table) +#define ATOMISP_IOC_S_ISP_CTC \ + _IOW('v', BASE_VIDIOC_PRIVATE + 32, struct atomisp_ctc_table) + +/* white balance Correction */ +#define ATOMISP_IOC_G_ISP_WHITE_BALANCE \ + _IOR('v', BASE_VIDIOC_PRIVATE + 33, struct atomisp_wb_config) +#define ATOMISP_IOC_S_ISP_WHITE_BALANCE \ + _IOW('v', BASE_VIDIOC_PRIVATE + 34, struct atomisp_wb_config) + +/* fpn table loading */ +#define ATOMISP_IOC_S_ISP_FPN_TABLE \ + _IOW('v', BASE_VIDIOC_PRIVATE + 35, struct v4l2_framebuffer) + +/* overlay image loading */ +#define ATOMISP_IOC_G_ISP_OVERLAY \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 36, struct atomisp_overlay) +#define ATOMISP_IOC_S_ISP_OVERLAY \ + _IOW('v', BASE_VIDIOC_PRIVATE + 37, struct atomisp_overlay) + +/* bcd driver bridge */ +#define ATOMISP_IOC_CAMERA_BRIDGE \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 38, struct atomisp_bc_video_package) + +/* Sensor resolution specific info for AE */ +#define ATOMISP_IOC_G_SENSOR_MODE_DATA \ + _IOR('v', BASE_VIDIOC_PRIVATE + 39, struct atomisp_sensor_mode_data) + +#define ATOMISP_IOC_S_EXPOSURE \ + _IOW('v', BASE_VIDIOC_PRIVATE + 40, struct atomisp_exposure) + +/* sensor calibration registers group */ +#define ATOMISP_IOC_G_SENSOR_CALIBRATION_GROUP \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 41, struct atomisp_calibration_group) + +/* white balance Correction */ +#define ATOMISP_IOC_G_3A_CONFIG \ + _IOR('v', BASE_VIDIOC_PRIVATE + 42, struct atomisp_3a_config) +#define ATOMISP_IOC_S_3A_CONFIG \ + _IOW('v', BASE_VIDIOC_PRIVATE + 
43, struct atomisp_3a_config) + +/* Accelerate ioctls */ +#define ATOMISP_IOC_ACC_LOAD \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 44, struct atomisp_acc_fw_load) + +#define ATOMISP_IOC_ACC_UNLOAD \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 45, unsigned int) + +#define ATOMISP_IOC_ACC_S_ARG \ + _IOW('v', BASE_VIDIOC_PRIVATE + 46, struct atomisp_acc_fw_arg) + +#define ATOMISP_IOC_ACC_START \ + _IOW('v', BASE_VIDIOC_PRIVATE + 47, unsigned int) + +#define ATOMISP_IOC_ACC_WAIT \ + _IOW('v', BASE_VIDIOC_PRIVATE + 48, unsigned int) + +#define ATOMISP_IOC_ACC_ABORT \ + _IOW('v', BASE_VIDIOC_PRIVATE + 49, struct atomisp_acc_fw_abort) + +/* sensor OTP memory read */ +#define ATOMISP_IOC_G_SENSOR_PRIV_INT_DATA \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 50, struct v4l2_private_int_data) + +/* LCS (shading) table write */ +#define ATOMISP_IOC_S_ISP_SHD_TAB \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 51, struct atomisp_shading_table) + +/* Gamma Correction */ +#define ATOMISP_IOC_G_ISP_GAMMA_CORRECTION \ + _IOR('v', BASE_VIDIOC_PRIVATE + 52, struct atomisp_gc_config) + +#define ATOMISP_IOC_S_ISP_GAMMA_CORRECTION \ + _IOW('v', BASE_VIDIOC_PRIVATE + 53, struct atomisp_gc_config) + +#define ATOMISP_IOC_ACC_DESTAB \ + _IOW('v', BASE_VIDIOC_PRIVATE + 54, struct atomisp_acc_fw_arg) + +/* + * Reserved ioctls. We have customer implementing it internally. + * We can't use both numbers to not cause ABI conflict. + * Anyway, those ioctls are hacks and not implemented by us: + * + * #define ATOMISP_IOC_G_SENSOR_REG \ + * _IOW('v', BASE_VIDIOC_PRIVATE + 55, struct atomisp_sensor_regs) + * #define ATOMISP_IOC_S_SENSOR_REG \ + * _IOW('v', BASE_VIDIOC_PRIVATE + 56, struct atomisp_sensor_regs) + */ + +/* motor internal memory read */ +#define ATOMISP_IOC_G_MOTOR_PRIV_INT_DATA \ + _IOWR('v', BASE_VIDIOC_PRIVATE + 57, struct v4l2_private_int_data) + +/* signal */ +#define ATOMISP_IOC_S_SIGNAL \ + _IOW('v', BASE_VIDIOC_PRIVATE + 58, int) + +#define ATOMISP_IOC_S_MIPI_IRQ \ + _IOW('v', BASE_VIDIOC_PRIVATE + 59, int) + +/* ISP Private control IDs */ +#define V4L2_CID_ATOMISP_BAD_PIXEL_DETECTION \ + (V4L2_CID_PRIVATE_BASE + 0) +#define V4L2_CID_ATOMISP_POSTPROCESS_GDC_CAC \ + (V4L2_CID_PRIVATE_BASE + 1) +#define V4L2_CID_ATOMISP_VIDEO_STABLIZATION \ + (V4L2_CID_PRIVATE_BASE + 2) +#define V4L2_CID_ATOMISP_FIXED_PATTERN_NR \ + (V4L2_CID_PRIVATE_BASE + 3) +#define V4L2_CID_ATOMISP_FALSE_COLOR_CORRECTION \ + (V4L2_CID_PRIVATE_BASE + 4) +#define V4L2_CID_ATOMISP_LOW_LIGHT \ + (V4L2_CID_PRIVATE_BASE + 5) + +/* Camera class: + * Exposure, Flash and privacy (indicator) light controls, to be upstreamed */ +#define V4L2_CID_CAMERA_LASTP1 (V4L2_CID_CAMERA_CLASS_BASE + 1024) + +#define V4L2_CID_FOCAL_ABSOLUTE (V4L2_CID_CAMERA_LASTP1 + 0) +#define V4L2_CID_FNUMBER_ABSOLUTE (V4L2_CID_CAMERA_LASTP1 + 1) +#define V4L2_CID_FNUMBER_RANGE (V4L2_CID_CAMERA_LASTP1 + 2) + +/* Flash related CIDs, see also: + * http://linuxtv.org/downloads/v4l-dvb-apis/extended-controls.html\ + * #flash-controls */ + +/* FLASH controls are redefined by atom ips + * undef those to get rif of warnings. + */ +#ifdef V4L2_CID_FLASH_INTENSITY +#undef V4L2_CID_FLASH_INTENSITY +#endif +#ifdef V4L2_CID_FLASH_TORCH_INTENSITY +#undef V4L2_CID_FLASH_TORCH_INTENSITY +#endif +#ifdef V4L2_CID_FLASH_INDICATOR_INTENSITY +#undef V4L2_CID_FLASH_INDICATOR_INTENSITY +#endif +#ifdef V4L2_CID_FLASH_TIMEOUT +#undef V4L2_CID_FLASH_TIMEOUT +#endif +#ifdef V4L2_CID_FLASH_STROBE +#undef V4L2_CID_FLASH_STROBE +#endif + + +/* Request a number of flash-exposed frames. 
The frame status can be + * found in the reserved field in the v4l2_buffer struct. */ +#define V4L2_CID_REQUEST_FLASH (V4L2_CID_CAMERA_LASTP1 + 3) +/* Flash intensity, in percentage. */ +#define V4L2_CID_FLASH_INTENSITY (V4L2_CID_CAMERA_LASTP1 + 4) +/* Query flash driver status. See enum atomisp_flash_status above. */ +#define V4L2_CID_FLASH_STATUS (V4L2_CID_CAMERA_LASTP1 + 5) +/* Torch intensity, in percentage. */ +#define V4L2_CID_FLASH_TORCH_INTENSITY (V4L2_CID_CAMERA_LASTP1 + 6) +/* Indicator intensity, in percentage. */ +#define V4L2_CID_FLASH_INDICATOR_INTENSITY (V4L2_CID_CAMERA_LASTP1 + 7) +/* Flash timeout (in ms). */ +#define V4L2_CID_FLASH_TIMEOUT (V4L2_CID_CAMERA_LASTP1 + 8) +/* Enable (1) or disable (0) the flash (only valid in FLASH mode). */ +#define V4L2_CID_FLASH_STROBE (V4L2_CID_CAMERA_LASTP1 + 9) +/* Set the flash mode (see enum atomisp_flash_mode) */ +#define V4L2_CID_FLASH_MODE (V4L2_CID_CAMERA_LASTP1 + 10) + +/* VCM slew control */ +#define V4L2_CID_VCM_SLEW (V4L2_CID_CAMERA_LASTP1 + 11) +/* VCM step time */ +#define V4L2_CID_VCM_TIMEING (V4L2_CID_CAMERA_LASTP1 + 12) +/* sensor test pattern */ +#define V4L2_CID_TEST_PATTERN (V4L2_CID_CAMERA_LASTP1 + 13) + +/* Query Focus Status */ +#define V4L2_CID_FOCUS_STATUS (V4L2_CID_CAMERA_LASTP1 + 14) + +/* Query sensor's binning factor */ +#define V4L2_CID_BIN_FACTOR_HORZ (V4L2_CID_CAMERA_LASTP1 + 15) +#define V4L2_CID_BIN_FACTOR_VERT (V4L2_CID_CAMERA_LASTP1 + 16) + +/* number of frames to skip at stream start */ +#define V4L2_CID_G_SKIP_FRAMES (V4L2_CID_CAMERA_LASTP1 + 17) + +#define V4L2_BUF_FLAG_BUFFER_INVALID 0x0400 +#define V4L2_BUF_FLAG_BUFFER_VALID 0x0800 + +/* Nonstandard color effects for V4L2_CID_COLORFX */ +enum { + V4L2_COLORFX_SKIN_WHITEN_LOW = 1001, + V4L2_COLORFX_SKIN_WHITEN_HIGH = 1002, +}; + +#endif /* _ATOM_ISP_H */ diff --git a/gst-libs/atomisphal/atomisp_v4l2.h b/gst-libs/atomisphal/atomisp_v4l2.h new file mode 100644 index 0000000..58f46a3 --- /dev/null +++ b/gst-libs/atomisphal/atomisp_v4l2.h @@ -0,0 +1,58 @@ +/* + * Support for Medifield PNW Camera Imaging ISP subsystem. + * + * Copyright (c) 2010 Intel Corporation. All Rights Reserved. + * + * Copyright (c) 2010 Silicon Hive www.siliconhive.com. + * + * This program is free software; you can redistribute it and/or + * modify it under the terms of the GNU General Public License version + * 2 as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program; if not, write to the Free Software + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA + * 02110-1301, USA. + * + */ + +#ifndef ATOMISP_V4L2_H_ +#define ATOMISP_V4L2_H_ + +#include + +/* SH header files */ +#include + +#define ATOMISP_MAJOR 0 +#define ATOMISP_MINOR 5 +#define ATOMISP_PATCHLEVEL 1 + +#define DRIVER_VERSION_STR __stringify(ATOMISP_MAJOR) \ + "." __stringify(ATOMISP_MINOR) "." 
__stringify(ATOMISP_PATCHLEVEL) +#define DRIVER_VERSION KERNEL_VERSION(ATOMISP_MAJOR, \ + ATOMISP_MINOR, ATOMISP_PATCHLEVEL) + +/*ISP binary running mode*/ +#define CI_MODE_PREVIEW 0x8000 +#define CI_MODE_VIDEO 0x4000 +#define CI_MODE_STILL_CAPTURE 0x2000 +#define CI_MODE_NONE 0x0000 + +#define ATOM_ISP_STEP_WIDTH 2 +#define ATOM_ISP_STEP_HEIGHT 2 + +#define ATOM_ISP_MIN_WIDTH 256 +#define ATOM_ISP_MIN_HEIGHT 2 +#define ATOM_ISP_MAX_WIDTH 4352 +#define ATOM_ISP_MAX_HEIGHT 3264 + +#define ATOM_ISP_MAX_WIDTH_TMP 1280 +#define ATOM_ISP_MAX_HEIGHT_TMP 720 + +#endif /* ATOMISP_V4L2_H_ */ diff --git a/gst-libs/atomisphal/gstv4l2mfldadvci.c b/gst-libs/atomisphal/gstv4l2mfldadvci.c new file mode 100644 index 0000000..e8cc2c3 --- /dev/null +++ b/gst-libs/atomisphal/gstv4l2mfldadvci.c @@ -0,0 +1,433 @@ +/* GStreamer MFLD ADVIC API + * Copyright (C) 2010 Intel Corporation + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#include "gstv4l2mfldadvci.h" +#include +#include +#include + +static void wrapper_default_void (void) +{ + return; +} + +static int wrapper_default_int_void (void) +{ + return 0; +} + +static void wrapper_default_ae (void) +{ + return; +} + +static void wrapper_default_init(void) +{ +} + +static void wrapper_default_uninit(void) +{ +} + +static void wrapper_default_enable(gboolean enable) +{ +} + +static void +wrapper_default_do_redeye_removal (advci_frame_t *frame) +{ +} + +static void +wrapper_default_AeSetFlickerMode(advci_ae_flicker_mode mode) +{ +} + +static advci_ae_flicker_mode +wrapper_default_AeGetFlickerMode(void) +{ + +return advci_ae_flicker_mode_off; +} + + +static void +wrapper_default_AeSetExposureProgram(advci_ae_exposure_program program) +{ +} + +static void +wrapper_default_AeGetExposureProgram(advci_ae_exposure_program *program) +{ +} + +static void wrapper_default_AfSetMode(advci_af_mode mode) +{ +} + +static void wrapper_default_AeSetMode(advci_ae_mode mode) +{ +} +static void wrapper_default_AeGetMode(advci_ae_mode *mode) +{ + +} +static advci_af_mode wrapper_default_AfGetMode() +{ +return advci_af_mode_auto; +} + +static void wrapper_default_AfSetRange(advci_af_range range) +{ +} + +static advci_af_range wrapper_default_AfGetRange() +{ + return advci_af_range_norm; +} + +static void wrapper_default_AwbSetMode(advci_awb_mode mode) +{ +} + +static advci_awb_mode wrapper_default_AwbGetMode() +{ + return advci_awb_mode_auto; +} + +static void wrapper_default_AwbSetLightSource(advci_awb_light_source ls) +{ +} + +static advci_awb_light_source +wrapper_default_AwbGetLightSource(void) +{ +return advci_awb_light_source_other; +} + +static ci_adv_Err +wrapper_default_int_int (int i) +{ + return 0; +} + +static ci_adv_Err +wrapper_default_intp (int *p) +{ + return 0; +} + +static int +wrapper_default_isp_set_fd (const char *sensor_id,int fd, const char *file) +{ + 
return SENSOR_TYPE_SOC; +} + +static void +wrapper_default_switch_mode (advci_isp_mode mode, float frame_rate) +{ +} + +static ci_adv_Err +wrapper_default_AeGetWindowsNum(int *num) +{ + return ci_adv_Success; +} + +static void wrapper_default_AwbVersion(int * major, int * minor) +{ +} + +static void wrapper_default_AeVersion(int * major, int * minor) +{ +} + +static void wrapper_default_AfVersion(int * major, int * minor) +{ +} + +static advci_af_status wrapper_default_AfGetStillAfStatus() +{ + return advci_af_status_busy; +} + +static void wrapper_default_RerVersion(int * major, int * minor) +{ + return; +} + +static ci_adv_Err wrapper_default_AeGetManualShutter(float *time) +{ + return ci_adv_Success; +} + +static int wrapper_default_AeGetManualAperture() +{ +return 1; +} + +static advci_ae_flash_mode +wrapper_default_AeGetFlashMode(void) +{ + return advci_ae_flash_mode_off; +} + +static void wrapper_default_AeSetFlashMode(advci_ae_flash_mode mode) +{ +} + +static void wrapper_default_AwbGetManualColorTemperature() +{ +} + +static void +wrapper_default_AeSetMeteringMode(advci_ae_metering_mode mode) +{ +} + +static advci_ae_metering_mode +wrapper_default_AeGetMeteringMode(void) +{ +return advci_ae_metering_mode_auto; +} + +static void +wrapper_default_AfSetMeteringMode(advci_af_metering_mode mode) +{ +} + +static void +wrapper_default_AfGetMeteringMode(advci_af_metering_mode *mode) +{ +} + +static void +wrapper_default_Ae_GetWindow(advci_window *window) +{ + +} +static void +wrapper_default_Ae_SetWindow(advci_window *window) +{ +} + +static void +wrapper_default_Af_SetWindows(int count, advci_window *window) +{ +} + +static void wrapper_default_af_start( ) +{ +} + +static void wrapper_default_af_stop( ) +{ +} +static void wrapper_default_process_flash(advci_flash_stage stage ) +{ +} + +static void wrapper_default_AeAfAwb_process( gboolean process, struct timeval *frame_timestamp) +{ +} +static void wrapper_default_AwbSetImageEffect(advci_image_effect effect) +{ +} + +static advci_image_effect +wrapper_default_AwbGetImageEffect(void) +{ +return advci_image_effect_none; +} + +static void +wrapper_default_is_flash_needed(gboolean *useflash) +{ + +} +static void +wrapper_default_af_assist_light_needed(gboolean *need) +{ +} + +static ci_adv_Err wrapper_default_AeSetManualIso(float sensitivity) +{ + return 0; +} + +static ci_adv_Err wrapper_default_AeGetManualIso(float *sensitivity) +{ + return 0; +} + +/*------------------------------------------------------------------------------------------------*/ +void +lib_3a_link_functions_init (GstV4l2MFLDAdvCI *mfldadvci, GModule *mod) +{ + + g_module_symbol (mod, "ci_adv_af_set_mode", (gpointer) &mfldadvci->AfSetMode); + g_module_symbol (mod, "ia_3a_af_get_focus_mode", (gpointer) &mfldadvci->AfGetMode); + + g_module_symbol (mod, "ci_adv_af_start", (gpointer) &mfldadvci->af_start); + g_module_symbol (mod, "ci_adv_af_stop", (gpointer) &mfldadvci->af_stop); + + g_module_symbol (mod, "ci_adv_process_frame", (gpointer) &mfldadvci->AeAfAwb_process); + g_module_symbol (mod, "ci_adv_uninit", (gpointer) &mfldadvci->uninit); + g_module_symbol (mod, "ci_adv_init", (gpointer) &mfldadvci->isp_set_fd); + g_module_symbol (mod, "ci_adv_configure", (gpointer) &mfldadvci->switch_mode); + g_module_symbol (mod, "ci_adv_ae_set_flash_mode", (gpointer) &mfldadvci->AeSetFlashMode); + g_module_symbol (mod, "ia_3a_ae_get_flash_mode", (gpointer) &mfldadvci->AeGetFlashMode); + g_module_symbol (mod, "ci_adv_awb_set_mode", (gpointer) &mfldadvci->AwbSetMode); + g_module_symbol 
(mod, "ia_3a_awb_get_mode", (gpointer) &mfldadvci->AwbGetMode); + g_module_symbol (mod, "ci_adv_ae_is_flash_necessary", (gpointer) &mfldadvci->ae_is_flash_needed); + g_module_symbol (mod, "ci_adv_af_need_assist_light", (gpointer) &mfldadvci->af_assist_light_needed); + g_module_symbol (mod, "ci_adv_ae_set_manual_iso", (gpointer) &mfldadvci->AeSetIso); + g_module_symbol (mod, "ci_adv_ae_set_flicker_mode", (gpointer) &mfldadvci->AeSetFlickerMode); + g_module_symbol (mod, "ia_3a_ae_get_flicker_mode", (gpointer) &mfldadvci->AeGetFlickerMode); + g_module_symbol (mod, "ci_adv_ae_set_exposure_program", (gpointer) &mfldadvci->AeSetExposureProgram); + g_module_symbol (mod, "ci_adv_ae_get_exposure_program", (gpointer) &mfldadvci->AeGetExposureProgram); + g_module_symbol (mod, "ci_adv_process_for_flash", (gpointer) &mfldadvci->process_flash); + g_module_symbol (mod, "ci_adv_af_set_range", (gpointer) &mfldadvci->AfSetRange); + g_module_symbol (mod, "ia_3a_af_get_focus_range", (gpointer) &mfldadvci->AfGetRange); + g_module_symbol (mod, "ci_adv_af_version", (gpointer) &mfldadvci->AfVersion); + g_module_symbol (mod, "ci_adv_awb_version", (gpointer) &mfldadvci->AwbVersion); + g_module_symbol (mod, "ci_adv_ae_version", (gpointer) &mfldadvci->AeVersion); + g_module_symbol (mod, "ci_adv_awb_set_light_source", (gpointer) &mfldadvci->AwbSetLightSource); + g_module_symbol (mod, "ia_3a_awb_get_light_source", (gpointer) &mfldadvci->AwbGetLightSource); + g_module_symbol (mod, "ci_adv_ae_set_manual_shutter", (gpointer) &mfldadvci->AeGetManualShutter); + g_module_symbol (mod, "ia_3a_ae_get_manual_aperture", (gpointer) &mfldadvci->AeGetManualAperture); + g_module_symbol (mod, "ia_3a_awb_get_manual_color_temperature", (gpointer) &mfldadvci->AwbGetManualColorTemperature); + g_module_symbol (mod, "ci_adv_ae_set_window", (gpointer) &mfldadvci->AeSetWindow); + g_module_symbol (mod, "ci_adv_af_set_windows", (gpointer) &mfldadvci->AfSetWindows); + g_module_symbol (mod, "ia_3a_ae_get_window", (gpointer) &mfldadvci->AeGetWindow); + g_module_symbol (mod, "ci_adv_ae_set_metering_mode", (gpointer) &mfldadvci->AeSetMeteringMode); + g_module_symbol (mod, "ia_3a_ae_get_metering_mode", (gpointer) &mfldadvci->AeGetMeteringMode); + g_module_symbol (mod, "ci_adv_af_set_metering_mode", (gpointer) &mfldadvci->AfSetMeteringMode); + g_module_symbol (mod, "ci_adv_af_get_metering_mode", (gpointer) &mfldadvci->AfGetMeteringMode); + g_module_symbol (mod, "ci_adv_isp_set_image_effect", (gpointer) &mfldadvci->AwbSetImageEffect); + g_module_symbol (mod, "ci_adv_isp_get_image_effect", (gpointer) &mfldadvci->AwbGetImageEffect); + g_module_symbol (mod, "ci_adv_af_get_status", (gpointer) &mfldadvci->AfGetStillAfStatus); + + g_module_symbol (mod, "ci_adv_ae_enable", (gpointer) &mfldadvci->AwbEnable); + g_module_symbol (mod, "ci_adv_af_enable", (gpointer) &mfldadvci->AeEnable); + g_module_symbol (mod, "ci_adv_awb_enable", (gpointer) &mfldadvci->AfEnable); + + g_module_symbol (mod, "ci_adv_awb_lock", (gpointer) &mfldadvci->AwbLock); + g_module_symbol (mod, "ci_adv_ae_lock", (gpointer) &mfldadvci->AeLock); + g_module_symbol (mod, "ci_adv_af_lock", (gpointer) &mfldadvci->AfLock); + + g_module_symbol (mod, "ci_adv_ae_set_mode", (gpointer) &mfldadvci->AeSetMode); + g_module_symbol (mod, "ci_adv_ae_get_mode", (gpointer) &mfldadvci->AeGetMode); + + mfldadvci->initialized = 0; + // TODO + mfldadvci->init = wrapper_default_init; + mfldadvci->mode_spec_init = wrapper_default_int_void; + mfldadvci->update_dis_results = wrapper_default_void; + 
mfldadvci->do_redeye_removal = wrapper_default_do_redeye_removal; + mfldadvci->load_gdc_table = wrapper_default_void; + mfldadvci->AeSetBias = wrapper_default_int_int; + mfldadvci->AeGetBias = wrapper_default_intp; + mfldadvci->AeGetIso = wrapper_default_AeGetManualIso; + mfldadvci->RerVersion = wrapper_default_RerVersion; + +} + +void +wrapper_default_link_functions_init (GstV4l2MFLDAdvCI *mfldadvci) +{ + + mfldadvci->initialized = 0; + mfldadvci->init = wrapper_default_init; + mfldadvci->uninit = wrapper_default_uninit; + mfldadvci->isp_set_fd = wrapper_default_isp_set_fd; + mfldadvci->mode_spec_init = wrapper_default_int_void; /* TODO */ + mfldadvci->switch_mode = wrapper_default_switch_mode; + + mfldadvci->AeAfAwb_process = wrapper_default_AeAfAwb_process; + + mfldadvci->process_flash = wrapper_default_process_flash; + + mfldadvci->ae_is_flash_needed = wrapper_default_is_flash_needed; + mfldadvci->af_assist_light_needed = wrapper_default_af_assist_light_needed; + + mfldadvci->ae_apply_results = wrapper_default_ae; + + mfldadvci->af_start = wrapper_default_af_start; + mfldadvci->af_stop = wrapper_default_af_stop; + + mfldadvci->update_dis_results = wrapper_default_void; + + mfldadvci->do_redeye_removal = wrapper_default_do_redeye_removal; + mfldadvci->load_gdc_table = wrapper_default_void; + + mfldadvci->AeSetBias = wrapper_default_int_int; + mfldadvci->AeGetBias = wrapper_default_intp; + mfldadvci->AeSetFlickerMode = wrapper_default_AeSetFlickerMode; + mfldadvci->AeGetFlickerMode = wrapper_default_AeGetFlickerMode; + mfldadvci->AeSetExposureProgram = wrapper_default_AeSetExposureProgram; + mfldadvci->AeGetExposureProgram = wrapper_default_AeGetExposureProgram; + mfldadvci->AeSetMeteringMode = wrapper_default_AeSetMeteringMode; + mfldadvci->AeGetMeteringMode = wrapper_default_AeGetMeteringMode; + mfldadvci->AeGetWindow = wrapper_default_Ae_GetWindow; + mfldadvci->AeSetWindow = wrapper_default_Ae_SetWindow; + mfldadvci->AeSetIso = wrapper_default_AeSetManualIso; + mfldadvci->AeGetIso = wrapper_default_AeGetManualIso; + + + mfldadvci->AfSetMode = wrapper_default_AfSetMode; + mfldadvci->AfGetMode = wrapper_default_AfGetMode; + mfldadvci->AfSetRange = wrapper_default_AfSetRange; + mfldadvci->AfGetRange = wrapper_default_AfGetRange; + mfldadvci->AfSetMeteringMode = wrapper_default_AfSetMeteringMode; + mfldadvci->AfGetMeteringMode = wrapper_default_AfGetMeteringMode; + mfldadvci->AfSetWindows = wrapper_default_Af_SetWindows; + + mfldadvci->AwbSetMode = wrapper_default_AwbSetMode; + mfldadvci->AwbGetMode = wrapper_default_AwbGetMode; + mfldadvci->AwbSetLightSource = wrapper_default_AwbSetLightSource; + mfldadvci->AwbGetLightSource = wrapper_default_AwbGetLightSource; + mfldadvci->AwbSetImageEffect = wrapper_default_AwbSetImageEffect; + mfldadvci->AwbGetImageEffect = wrapper_default_AwbGetImageEffect; + + mfldadvci->AeGetWindowsNum = wrapper_default_AeGetWindowsNum; + mfldadvci->AwbVersion = wrapper_default_AwbVersion; + mfldadvci->AeVersion = wrapper_default_AeVersion; + mfldadvci->AfVersion = wrapper_default_AfVersion; + mfldadvci->AfGetStillAfStatus = wrapper_default_AfGetStillAfStatus; + mfldadvci->RerVersion = wrapper_default_RerVersion; + mfldadvci->AeGetManualShutter = wrapper_default_AeGetManualShutter; + mfldadvci->AeGetManualAperture = wrapper_default_AeGetManualAperture; + mfldadvci->AeSetFlashMode = wrapper_default_AeSetFlashMode; + mfldadvci->AeGetFlashMode = wrapper_default_AeGetFlashMode; + mfldadvci->AwbGetManualColorTemperature = wrapper_default_AwbGetManualColorTemperature; + 
+ mfldadvci->AwbEnable = wrapper_default_enable; + mfldadvci->AeEnable = wrapper_default_enable; + mfldadvci->AfEnable = wrapper_default_enable; + + mfldadvci->AwbLock = wrapper_default_enable; + mfldadvci->AeLock = wrapper_default_enable; + mfldadvci->AfLock = wrapper_default_enable; + + mfldadvci->AeSetMode = wrapper_default_AeSetMode; + mfldadvci->AeGetMode = wrapper_default_AeGetMode; + + +} diff --git a/gst-libs/atomisphal/gstv4l2mfldadvci.h b/gst-libs/atomisphal/gstv4l2mfldadvci.h new file mode 100644 index 0000000..b5848f8 --- /dev/null +++ b/gst-libs/atomisphal/gstv4l2mfldadvci.h @@ -0,0 +1,285 @@ +/* GStreamer MFLD ADVIC API + + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE. + */ + +#ifndef _GST_V4L2MFLDADVCI_H_ +#define _GST_V4L2MFLDADVCI_H_ + +/* LIBMFLDADVCI_VERSION define the version number between camera source and + * libmfldadvci. + * Update this version number when you update the GstV4l2MFLDAdvCI structure + */ + +#define LIBMFLDADVCI_VERSION 20110128 + +#include +#include +#include +#include +#include "atomisp.h" + +typedef enum { + ci_adv_Success, + ci_adv_NotImplemented, + ci_adv_GeneralError, + ci_adv_InvalidArgument, + ci_adv_OutOfMemory, + /* extend this enum with error identifiers needed to correctly + report all possible errors. 
*/ +} ci_adv_Err; + +typedef enum +{ + advci_isp_mode_preview, + advci_isp_mode_video, + advci_isp_mode_capture, + advci_isp_mode_continuous +} advci_isp_mode; + +typedef struct { + void *addr; + int length; + int width; + int height; + int format; +} advci_frame_t; + +typedef enum { + advci_ae_flicker_mode_off, /* No flicker correction */ + advci_ae_flicker_mode_50hz, /* Correct 50Hz flicker */ + advci_ae_flicker_mode_60hz, /* Correct 60Hz flicker */ + advci_ae_flicker_mode_auto /* Auto detect and correct flicker */ +} advci_ae_flicker_mode; + +typedef enum { + advci_ae_exposure_program_auto, + advci_ae_exposure_program_portrait, + advci_ae_exposure_program_sports, + advci_ae_exposure_program_landscape, + advci_ae_exposure_program_night, + advci_ae_exposure_program_fireworks +} advci_ae_exposure_program; + +typedef enum { + advci_ae_metering_mode_auto, /* Automatic */ + advci_ae_metering_mode_spot, /* Spot metering, */ + advci_ae_metering_mode_center, /* Center-weighted metering */ + advci_ae_metering_mode_customized, /* Customized */ + advci_ae_metering_mode_average, /* Average metering */ +} advci_ae_metering_mode; + + +typedef struct { + int x_left; + int x_right; + int y_top; + int y_bottom; + int weight; +} advci_window; + +typedef enum { + advci_af_metering_mode_auto, + advci_af_metering_mode_spot, + advci_af_metering_mode_multi, +} advci_af_metering_mode; + + +typedef enum { + advci_af_range_norm, + advci_af_range_macro, + advci_af_range_full +} advci_af_range; + +/* Auto White Balance */ +typedef enum { + advci_awb_mode_auto, /* Corrected colors are calibrated to match changes in illuminant color */ + advci_awb_mode_manual /* The current color balance is retained */ +} advci_awb_mode; + +typedef enum { + advci_image_effect_none = 0, + advci_image_effect_bw, + advci_image_effect_sepia, + advci_image_effect_negative +} advci_image_effect; + +typedef enum { + advci_awb_light_source_other, + advci_awb_light_source_filament_lamp, + advci_awb_light_source_clear_sky, + advci_awb_light_source_cloudiness, + advci_awb_light_source_shadow_area, + advci_awb_light_source_fluorlamp_w, + advci_awb_light_source_fluorlamp_n, + advci_awb_light_source_fluorlamp_d, +} advci_awb_light_source; + +typedef enum { + advci_ae_flash_mode_auto, /* Automatic (don't flash when it's dark) */ + advci_ae_flash_mode_off, /* Off */ + advci_ae_flash_mode_on, /* On */ + advci_ae_flash_mode_day_sync, /* Day Sync */ + advci_ae_flash_mode_slow_sync, /* Slow Sync */ +} advci_ae_flash_mode; + +typedef struct { + char *data; + unsigned int bytes; +} advci_mknote; + +typedef enum { + advci_mknote_mode_jpeg, /* Returns makernote data for JPEG EXIF */ + advci_mknote_mode_raw /* Returns makernote data for RAW Header */ +} advci_mknote_mode; + +enum { + SENSOR_TYPE_RAW = 1, + SENSOR_TYPE_SOC +}; + +typedef enum { + advci_af_mode_auto, + advci_af_mode_manual, + advci_af_mode_video +} advci_af_mode; + +typedef enum { + advci_ae_mode_auto, /* Automatic */ + advci_ae_mode_manual, /* Manual */ + advci_ae_mode_shutter_priority, /* Shutter priority */ + advci_ae_mode_aperture_priority /* Aperture priority */ +} advci_ae_mode; + + +typedef enum { + advci_af_status_error, /* Cannot focus */ + advci_af_status_success, /* Successfully focussed */ + advci_af_status_cancelled, /* Canceled by user */ + advci_af_status_busy, /* Busy focussing */ + advci_af_status_idle /* AF not running */ +} advci_af_status; + +typedef enum { + advci_flash_stage_none, + advci_flash_stage_pre, + advci_flash_stage_main +} advci_flash_stage; + + +typedef struct 
_GstV4l2MFLDAdvCI GstV4l2MFLDAdvCI; +/** + * GstV4l2MFLDAdvCI + */ +struct _GstV4l2MFLDAdvCI +{ + void *advci_priv; + int initialized; + + /* 3A control API */ + + int (*mode_spec_init)(void); + void (*switch_mode)(advci_isp_mode mode, float frame_rate); + void (*init) (void); + void (*uninit) (void); + int (*isp_set_fd)(const char *sensor_id,int fd, const char *file); + + void (*AeAfAwb_process)(gboolean process, struct timeval *frame_timestamp); + + void (*process_flash)(advci_flash_stage stage); + void (*ae_is_flash_needed)(gboolean *useflash); + void (*ae_apply_results)(void); + + void (*af_start)(void); + void (*af_stop)(void); + void (*af_assist_light_needed)(gboolean *needed); + + void (*update_dis_results)(void); + + void (*do_redeye_removal) (advci_frame_t *user_buf); + + void (*load_gdc_table)(void); + + /* 3A property API */ + ci_adv_Err (*AeSetBias)(int bias); /* bias: -2.0(EV) to +2.0(EV) */ + ci_adv_Err (*AeGetBias)(int *bias); /* bias: -2.0(EV) to +2.0(EV) */ + void (*AeSetFlickerMode)(advci_ae_flicker_mode mode); + advci_ae_flicker_mode (*AeGetFlickerMode)(void); + void (*AeSetExposureProgram)(advci_ae_exposure_program program); + void (*AeGetExposureProgram)(advci_ae_exposure_program *program); + void (*AeSetMeteringMode)(advci_ae_metering_mode mode); + advci_ae_metering_mode (*AeGetMeteringMode)(void); + void (*AeSetWindow)(advci_window *window); + + void (*AeGetWindow)(advci_window *window); + ci_adv_Err (*AeSetIso)(float sensitivity); + ci_adv_Err (*AeGetIso)(float *sensitivity); + + void (*AfSetMode)(advci_af_mode mode); + advci_af_mode (*AfGetMode)(void); + void (*AfSetMeteringMode)(advci_af_metering_mode mode); + void (*AfGetMeteringMode)(advci_af_metering_mode *mode); + void (*AfSetWindows)(int count, advci_window *window); + + void (*AfSetRange)(advci_af_range range); + advci_af_range (*AfGetRange)(void); + + + void (*AeSetMode)(advci_ae_mode mode); + void (*AeGetMode)(advci_ae_mode * mode); + + /* AWB mode */ + void (*AwbSetMode)(advci_awb_mode mode); + advci_awb_mode (*AwbGetMode)(void); + + /* color tone */ + void (*AwbSetImageEffect) (advci_image_effect effect); + advci_image_effect (*AwbGetImageEffect)(void); + + + /* AWB Light Source */ + void (*AwbSetLightSource)(advci_awb_light_source ls); + advci_awb_light_source (*AwbGetLightSource)(void); + + ci_adv_Err (*AeGetWindowsNum)(int *num); + void (*AwbVersion)(int * major, int * minor); + void (*AeVersion)(int * major, int * minor); + void (*AfVersion)(int * major, int * minor); + advci_af_status (*AfGetStillAfStatus)(void); + void (*RerVersion)(int * major, int * minor); + ci_adv_Err (*AeGetManualShutter)(float *time); + int (*AeGetManualAperture)(void); + advci_ae_flash_mode (*AeGetFlashMode)(void); + void (*AeSetFlashMode)(advci_ae_flash_mode mode); + void (*AwbGetManualColorTemperature)(int *ctemp); + + void (*AwbEnable)(gboolean enable); + void (*AeEnable)(gboolean enable); + void (*AfEnable)(gboolean enable); + + void (*AwbLock)(gboolean lock); + void (*AeLock)(gboolean lock); + void (*AfLock)(gboolean lock); + +}; + +void lib_3a_link_functions_init (GstV4l2MFLDAdvCI *mfldadvci, GModule *mod); +void wrapper_default_link_functions_init (GstV4l2MFLDAdvCI *mfldadvci); + +#endif diff --git a/gst-libs/atomisphal/mfld_cam.c b/gst-libs/atomisphal/mfld_cam.c new file mode 100644 index 0000000..7f6277a --- /dev/null +++ b/gst-libs/atomisphal/mfld_cam.c @@ -0,0 +1,2039 @@ +/* Gstreamer MFLD camera source abstract Layer API + * Copyright (c) 2010 Intel Corporation + + * This library is free software; you can 
redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ +#include +#include +#include +#include +#include +#include + +#include + +#include "atomisp.h" + +#include "mfld_cam.h" +#include "mfld_driver.h" + +#include +#include +#include +#include + +sem_t g_sem_3a; + +/* Focus Status Map */ +static const int focus_status_map[][2] = +{ + { CAM_FOCUS_STATUS_IDLE , advci_af_status_idle}, /* AF not running */ + { CAM_FOCUS_STATUS_RUNNING, advci_af_status_busy}, /* Busy focusing */ + { CAM_FOCUS_STATUS_SUCCESS, advci_af_status_success}, /* Successfully focussed */ + { CAM_FOCUS_STATUS_FAIL, advci_af_status_error}, /* Cannot focus */ + { CAM_FOCUS_STATUS_SUCCESS, advci_af_status_cancelled,},/* Canceled by user */ + { -1, -1 } + +}; + +/* TODO */ +#define DIS_COMPOSITE_FRAMES 3 + + +/* + * Return mapping for item from given table. If @reverse is FALSE, @item is + * considered as a GstPhotography enumeration value and the function will + * return matching atomispcam value for it. If @reverse is TRUE, @item + * is considered as a atomispcam value and this function returns + * corresponding GstPhotography value. If mapping is not found, mapping + * for table index 0 is returned. + */ +int +cam_find_item_new (const int table[][2], const int item, int reverse) +{ + unsigned int i = 0; + int index = reverse ? 1 : 0; + + /* Shortcut didn't work, go through the whole table to find the item */ + while (table[i][0] != -1) { + if (table[i][index] == item) { + return table[i][index ^ 1]; + } + i++; + } + + return table[0][index ^ 1]; +} + + +/* The ISP mode set to the driver. Driver switch the binary according to the Mode + * */ +/* + * Saving for the ISP status in the library */ + +struct mfld_driver_t +{ + int fd; + cam_capture_mode_t capture_mode; + int rer_enabled; + int dis_enabled, dvs_enabled; + int af_enabled, ae_enabled, awb_enabled; + int af_result; // 0 means fail, it will be filled when captured finished. + int still_af_count, start_still_af; + advci_window window; + struct timeval af_start_time; + int initflag; + int mmap; + int first_frame; + int g_3a_started; + //GThread *thread; + pthread_t thread; + unsigned int width, height, fourcc; + struct atomisp_dis_vector dvs_vector; + float frame_rate; + int sensor_type; + struct timeval timestamp; + int focus_done; + gboolean enable_torch; + gboolean flash_used; +}; + +struct buffer +{ + void *start; + size_t length; +}; + +/* TODO: Check if this is needed anymore after focus works like it should. + * Hysteriss added to focus_done + * in some case when old focus is still in progress and appication + * ask new focus to be started might be a case that 3A returns false "focus ready" + * and status info is sent to app. -> for some reason it will crash to that. 
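/* [Editor's note, not part of the patch] The two-column mapping helper above is
 * easiest to read with a concrete lookup.  A minimal sketch, assuming only the
 * focus_status_map table and cam_find_item_new() defined earlier in this file: */

/* Reverse lookup (advci -> CAM), as cam_checkfocus_status() does later on. */
cam_focus_status_t status =
    cam_find_item_new (focus_status_map, advci_af_status_busy, 1);
/* status == CAM_FOCUS_STATUS_RUNNING */

/* Forward lookup (CAM -> advci); an unmatched key falls back to row 0. */
int af = cam_find_item_new (focus_status_map, CAM_FOCUS_STATUS_FAIL, 0);
/* af == advci_af_status_error */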
+* */ +#define FOCUS_REALLY_DONE 2 + +/* Maximum auto focus time */ +static const int STILL_AF_MAX_TIME_IN_MS = 2200; + +/* FIXME: Add lock to protec this global variable + * */ +static struct mfld_cam_settings_t mfld_cam_settings; +static struct mfld_driver_t mfld_driver; +static GstV4l2MFLDAdvCI *mfldadvci; + +static inline long +calculate_timediff(struct timeval *t0, struct timeval *t1) +{ + return ((t1->tv_sec - t0->tv_sec) * 1000000 + + t1->tv_usec - t0->tv_usec) / 1000; +} + +static void +clear_bit (int nr, unsigned int *addr) +{ + unsigned int mask = (1UL << (nr) % 32); + unsigned int *p = ((unsigned int *) addr) + nr / 32; + + *p &= ~mask; +} + +static void +set_bit (int nr, unsigned int *addr) +{ + unsigned int mask = (1UL << (nr) % 32); + unsigned int *p = ((unsigned int *) addr) + nr / 32; + + *p |= mask; +} + +/* Set the white balance Mode throught the 3A library */ +static cam_err_t +set_wb_mode (int wb_mode) +{ + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + switch (wb_mode) { + case CAM_AWB_MODE_AUTO: + mfldadvci->AwbSetMode (advci_awb_mode_auto); + break; + case CAM_AWB_MODE_DAYLIGHT: + mfldadvci->AwbSetMode (advci_awb_mode_manual); + mfldadvci->AwbSetLightSource (advci_awb_light_source_clear_sky); + break; + case CAM_AWB_MODE_CLOUDY: + mfldadvci->AwbSetMode (advci_awb_mode_manual); + mfldadvci->AwbSetLightSource (advci_awb_light_source_cloudiness); + break; + case CAM_AWB_MODE_SUNSET: + mfldadvci->AwbSetMode (advci_awb_mode_manual); + mfldadvci->AwbSetLightSource (advci_awb_light_source_shadow_area); + break; + case CAM_AWB_MODE_TUNGSTEN: + mfldadvci->AwbSetMode (advci_awb_mode_manual); + mfldadvci->AwbSetLightSource (advci_awb_light_source_filament_lamp); + break; + case CAM_AWB_MODE_FLUORESCENT: + mfldadvci->AwbSetMode (advci_awb_mode_manual); + mfldadvci->AwbSetLightSource (advci_awb_light_source_fluorlamp_w); + break; + default: + cam_driver_dbg ("Not support awb mode\n"); + return CAM_ERR_UNSUPP; + } + } + mfld_cam_settings.wb_mode = wb_mode; + return CAM_ERR_NONE; +} + +cam_err_t +get_wb_mode (int *wb_mode) +{ + advci_awb_light_source ls; + advci_awb_mode mode; + + if (mfld_driver.sensor_type != SENSOR_TYPE_RAW) { + *wb_mode = CAM_AWB_MODE_AUTO; + return CAM_ERR_NONE; + } + + mode = mfldadvci->AwbGetMode(); + if (mode == advci_awb_mode_auto) { + *wb_mode = CAM_AWB_MODE_AUTO; + return CAM_ERR_NONE; + } + //Manual Mode, check the error? 
+ ls = mfldadvci->AwbGetLightSource(); + switch (ls) { + case advci_awb_light_source_filament_lamp: + *wb_mode = CAM_AWB_MODE_TUNGSTEN; + break; + case advci_awb_light_source_clear_sky: + *wb_mode = CAM_AWB_MODE_DAYLIGHT; + break; + case advci_awb_light_source_cloudiness: + *wb_mode = CAM_AWB_MODE_CLOUDY; + break; + case advci_awb_light_source_shadow_area: + *wb_mode = CAM_AWB_MODE_SUNSET; + break; + case advci_awb_light_source_fluorlamp_w: + case advci_awb_light_source_fluorlamp_n: + case advci_awb_light_source_fluorlamp_d: + *wb_mode = CAM_AWB_MODE_FLUORESCENT; + break; + default: + //Use daylight as default + *wb_mode = CAM_AWB_MODE_DAYLIGHT; + break; + } + + return CAM_ERR_NONE; +} + +static cam_err_t +set_tone_mode (int fd, int mode) +{ + cam_err_t ret = CAM_ERR_NONE; + enum v4l2_colorfx colorfx = mode; + + cam_driver_dbg ("%s mode %d\n", __func__, mode); + + ret = cam_driver_set_tone_mode (fd, colorfx); + if (ret) + mfld_cam_settings.tone_mode = mode; + return ret; +} + +static cam_err_t +get_tone_mode (int fd, int *mode) +{ + cam_err_t ret = CAM_ERR_NONE; + ret = cam_driver_get_tone_mode (fd, mode); + return ret; +} + +static cam_err_t +set_scene_mode (int mode) +{ + advci_ae_exposure_program scene; + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + switch (mode) { + case CAM_GENERAL_SCENE_MODE_PORTRAIT: + scene = advci_ae_exposure_program_portrait; + break; + case CAM_GENERAL_SCENE_MODE_LANDSCAPE: + scene = advci_ae_exposure_program_landscape; + break; + case CAM_GENERAL_SCENE_MODE_SPORT: + scene = advci_ae_exposure_program_sports; + break; + case CAM_GENERAL_SCENE_MODE_NIGHT: + scene = advci_ae_exposure_program_night; + break; + case CAM_GENERAL_SCENE_MODE_AUTO: + scene = advci_ae_exposure_program_auto; + break; + case CAM_GENERAL_SCENE_MODE_FIREWORKS: + scene = advci_ae_exposure_program_fireworks; + break; + default: + cam_driver_dbg ("%s Not supported Scene Mode %d\n", __func__, mode); + break; + } + mfldadvci->AeSetExposureProgram (scene); + } + mfld_cam_settings.scene_mode = mode; + return CAM_ERR_NONE; +} + +static cam_err_t +get_scene_mode (int *scene_mode) +{ + *scene_mode = mfld_cam_settings.scene_mode; + return CAM_ERR_NONE; +} + +static cam_err_t +set_iso_speed (int iso) +{ + cam_err_t ret = CAM_ERR_NONE; + + float sv; + mfld_cam_settings.iso_speed = iso; + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + if (iso <=0) + return CAM_ERR_PARAM; + sv = log10((float)iso / 3.125) / log10(2.0); + ret = mfldadvci->AeSetIso(sv); + } + return ret; +} + +static cam_err_t +get_iso_speed(int *iso) +{ + *iso = mfld_cam_settings.iso_speed; + return CAM_ERR_NONE; +} + +static cam_err_t +set_flash_mode (int fd, int mode) +{ + cam_err_t ret = CAM_ERR_NONE; + mfld_cam_settings.flash_mode = mode; + + cam_driver_dbg ("set flash mode: %d\n",mode); + /* TODO check this when making auto modes etc.. 
*/ + switch (mode) { + case CAM_LIGHT_FLASH_MODE_AUTO: + case CAM_LIGHT_FLASH_MODE_ON: + case CAM_LIGHT_FLASH_MODE_FILL_IN: + case CAM_LIGHT_FLASH_MODE_RED_EYE: + case CAM_LIGHT_FLASH_MODE_OFF: + if(!mfld_driver.enable_torch) + ret = cam_driver_set_flash_mode(fd,ATOMISP_FLASH_MODE_OFF); + break; + } + set_ae_flash_mode(fd,mode); + return ret; +} + +static cam_err_t +get_flash_mode (int *flash_mode) +{ + *flash_mode = mfld_cam_settings.flash_mode; + return CAM_ERR_NONE; +} + +static cam_err_t +set_flicker_mode (int mode) +{ + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + /* Convert camera genreral flicker mode to AE flicker mode */ + switch (mode) { + case CAM_GENERAL_FLICKER_REDUCTION_MODE_OFF: + mfldadvci->AeSetFlickerMode (advci_ae_flicker_mode_off); + break; + case CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ: + mfldadvci->AeSetFlickerMode (advci_ae_flicker_mode_50hz); + break; + case CAM_GENERAL_FLICKER_REDUCTION_MODE_60HZ: + mfldadvci->AeSetFlickerMode (advci_ae_flicker_mode_60hz); + break; + case CAM_GENERAL_FLICKER_REDUCTION_MODE_AUTO: + mfldadvci->AeSetFlickerMode (advci_ae_flicker_mode_auto); + break; + default: + cam_driver_dbg ("Not supported flicker mode\n"); + return CAM_ERR_UNSUPP; + } + } + return CAM_ERR_NONE; +} + +static cam_err_t +get_flicker_mode (int *mode) +{ + int ret = CAM_ERR_NONE; + advci_ae_flicker_mode flicker_mode; + + if (mfld_driver.sensor_type != SENSOR_TYPE_RAW) { + *mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_AUTO; + return ret; + } + flicker_mode = mfldadvci->AeGetFlickerMode(); + + /* Convert AE flicker mode to camera general flicker mode */ + switch (flicker_mode) { + case advci_ae_flicker_mode_off: + *mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_OFF; + break; + case advci_ae_flicker_mode_50hz: + *mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ; + break; + case advci_ae_flicker_mode_60hz: + *mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_60HZ; + break; + case advci_ae_flicker_mode_auto: + *mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_AUTO; + break; + default: + cam_driver_dbg ("Not supported flicker mode\n"); + return CAM_ERR_UNSUPP; + } + + return ret; +} + +static cam_err_t +set_focus_mode (int mode) +{ + + cam_driver_dbg ("set_focus_mode: %d\n", mode); + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW){ + switch (mode) { + case CAM_FOCUS_MODE_AUTO: + mfldadvci->AfSetMode (advci_af_mode_manual); + mfldadvci->AfSetRange (advci_af_range_norm); + mfldadvci->AfSetMeteringMode(advci_af_metering_mode_auto); + break; + case CAM_FOCUS_MODE_TOUCH_AUTO: + mfldadvci->AfSetMode (advci_af_mode_auto); + mfldadvci->AfSetRange (advci_af_range_full); + mfldadvci->AfSetMeteringMode(advci_af_metering_mode_multi); + break; + case CAM_FOCUS_MODE_MACRO: + mfldadvci->AfSetMode (advci_af_mode_auto); + mfldadvci->AfSetRange (advci_af_range_macro); + mfldadvci->AfSetMeteringMode(advci_af_metering_mode_auto); + break; + case CAM_FOCUS_MODE_NORM: + mfldadvci->AfSetMode (advci_af_mode_auto); + mfldadvci->AfSetRange (advci_af_range_norm); + mfldadvci->AfSetMeteringMode(advci_af_metering_mode_auto); + break; + case CAM_FOCUS_MODE_FULL: + mfldadvci->AfSetMode (advci_af_mode_manual); + mfldadvci->AfSetRange (advci_af_range_full); + break; + case CAM_FOCUS_MODE_CONTINUOUS: + mfldadvci->AfSetMode (advci_af_mode_auto); + mfldadvci->AfSetRange (advci_af_range_norm); + mfldadvci->AfSetMeteringMode(advci_af_metering_mode_auto); + break; + default: + cam_driver_dbg ("Not supported mode\n"); + return CAM_ERR_UNSUPP; + } + } + + mfld_cam_settings.focus_mode = mode; + return CAM_ERR_NONE; +} + +static 
cam_err_t +get_focus_mode (int *mode) +{ + *mode = mfld_cam_settings.focus_mode; + return CAM_ERR_NONE; +} + +/* Only update the noise that set by user */ +static cam_err_t +set_noise_reduction (int fd, int mode) +{ + cam_err_t ret = CAM_ERR_NONE; + /* Only update the mode changed */ + int tmp_mode = mode ^ mfld_cam_settings.noise_reduction; + + if (tmp_mode & (1 << CAM_NOISE_REDUCTION_EXTRA)) { + if (mode & (1 << CAM_NOISE_REDUCTION_EXTRA)) + ret = cam_driver_set_xnr (fd, ON); + else + ret = cam_driver_set_xnr (fd, OFF); + } + + if (tmp_mode & (1 << CAM_NOISE_REDUCTION_BAYER)) { + if (mode & (1 << CAM_NOISE_REDUCTION_BAYER)) + ret = cam_driver_set_bnr (fd, ON); + else + ret = cam_driver_set_bnr (fd, OFF); + } + + if (tmp_mode & (1 << CAM_NOISE_REDUCTION_YCC)) { + if (mode & (1 << CAM_NOISE_REDUCTION_YCC)) + ret = cam_driver_set_ynr (fd, ON); + else + ret = cam_driver_set_ynr (fd, OFF); + } + + if (tmp_mode & (1 << CAM_NOISE_REDUCTION_TEMPORAL)) { + if (mode & (1 << CAM_NOISE_REDUCTION_TEMPORAL)) + ret = cam_driver_set_tnr (fd, ON); + else + ret = cam_driver_set_tnr (fd, OFF); + } + + if (tmp_mode & (1 << CAM_NOISE_REDUCTION_FIXED_PATTERN)) { + if (mode & (1 << CAM_NOISE_REDUCTION_FIXED_PATTERN)) + ret = cam_driver_set_fpn (fd, ON); + else + ret = cam_driver_set_fpn (fd, OFF); + } + //FIXME: Add false handling here + mfld_cam_settings.noise_reduction = mode; + return ret; +} + +static cam_err_t +get_noise_reduction (int *mode) +{ + *mode = mfld_cam_settings.noise_reduction; + return CAM_ERR_NONE; +} + +/* Only update the advanced features that set by user */ +static cam_err_t +set_capture_correction (int fd, int mode) +{ + cam_err_t ret = 0; + /* Only update the mode changed */ + int tmp_mode = mode ^ mfld_cam_settings.capture_correction; + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_GDC)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_GDC)) { + mfldadvci->load_gdc_table(); + ret = cam_driver_set_gdc (fd, ON); + } + else + ret = cam_driver_set_gdc (fd, OFF); + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_RER)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_RER)) { + mfld_driver.rer_enabled = 1; + } else { + mfld_driver.rer_enabled = 0; + } + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_DIS)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_DIS)) { + mfld_driver.dis_enabled = 1; + } else { + mfld_driver.dis_enabled = 0; + } + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_DVS)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_DVS)) { + mfld_driver.dvs_enabled = 1; + ret = cam_driver_set_dvs (fd, ON); + } else { + mfld_driver.dvs_enabled = 0; + ret = cam_driver_set_dvs (fd, OFF); + } + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_CAC)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_CAC)) + ret = cam_driver_set_cac (fd, ON); + else + ret = cam_driver_set_cac (fd, OFF); + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_EE)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_EE)) + ret = cam_driver_set_ee (fd, ON); + else + ret = cam_driver_set_ee (fd, OFF); + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_SC)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_SC)) + ret = cam_driver_set_sc (fd, ON); + else + ret = cam_driver_set_sc (fd, OFF); + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_BLC)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_BLC)) + ret = cam_driver_set_blc (fd, ON); + else + ret = cam_driver_set_blc (fd, OFF); + } + + if (tmp_mode & (1 << CAM_CAPTURE_CORRECTION_BPD)) { + if (mode & (1 << CAM_CAPTURE_CORRECTION_BPD)) + ret = cam_driver_set_bpd (fd, ON); + else + ret = 
cam_driver_set_bpd (fd, OFF); + } + + mfld_cam_settings.capture_correction = mode; + return 0; +} + +static cam_err_t +get_capture_correction (int *mode) +{ + *mode = mfld_cam_settings.capture_correction; + return CAM_ERR_NONE; +} + +/* set_capture_fmt: Tell the driver what format we want to produce, and + * driver tells us what format we should capture. This mechanism exists + * because the driver may want to capture some exotic RAW format from + * the sensor, process it and finally convert to desired format + * (in @preprocess function). + */ +#if 0 +static gboolean +cam_driver_set_capture_fmt (GstCameraSrc * camerasrc, + guint32 output_fourcc, guint output_outsize, + guint32 * internal_fourcc, guint * internal_outsize) +{ + gboolean ret; + cam_driver_dbg ("%s\n", __func__); + //Acutally, we don't plan to implement more than set format now + struct v4l2_format fmt; + memset (&fmt, 0, sizeof (fmt)); + *internal_fourcc = V4L2_PIX_FMT_YUV420; + return TRUE; +} + +/* Tell the driver what resolution we want to capture. + */ +static gboolean +cam_driver_set_capture_res (GstCameraSrc * camerasrc, guint width, guint height) +{ + gboolean ret; + // Just set the capture format + struct v4l2_format fmt; + mfld_driver.height = height; + mfld_driver.width = width; + cam_driver_dbg ("%s: set res to width %u, height %u\n", __func__, width, + height); +#if 0 + memset (&fmt, 0, sizeof (fmt)); + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = width; + fmt.fmt.pix.height = height; + fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUV420; + ret = ioctl (mfld_driver.fd, VIDIOC_S_FMT, &fmt); + if (ret < 0) + return FALSE; + return TRUE; +#endif + return TRUE; +} + +#endif +/* + * What is this function used for ? + * TBD + */ +cam_err_t +cam_set_capture_mode (int fd, cam_capture_mode_t mode) +{ + cam_err_t ret = CAM_ERR_NONE; + cam_driver_dbg ("%s\n", __func__); + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW){ + switch (mode) { + case CAM_CAPTURE_MODE_VIEWFINDER: + mfldadvci->switch_mode (advci_isp_mode_preview, mfld_driver.frame_rate); + set_flash_mode(fd, mfld_cam_settings.flash_mode); + cam_driver_dbg ("%s: VIEWFINDER Mode is set\n", __func__); + break; + case CAM_CAPTURE_MODE_VIDEO_RECORD: + mfldadvci->switch_mode (advci_isp_mode_video, mfld_driver.frame_rate); + cam_driver_dbg ("%s: VIDEO Mode is set\n", __func__); + break; + case CAM_CAPTURE_MODE_STILL_CAPTURE: + mfldadvci->switch_mode (advci_isp_mode_capture, mfld_driver.frame_rate); + cam_driver_dbg ("%s: STILL Mode is set\n", __func__); + break; + default: + mfldadvci->switch_mode (advci_isp_mode_preview, mfld_driver.frame_rate); + cam_driver_dbg ("%s: NULL Mode is set\n", __func__); + break; + } + } + else + mfld_driver.first_frame = 1; + + mfld_driver.capture_mode = mode; + return ret; +} + +cam_err_t +cam_set_capture_fmt (int fd, unsigned int width, unsigned int height, + unsigned int fourcc) +{ + mfld_driver.width = width; + mfld_driver.height = height; + mfld_driver.fourcc = fourcc; + + mfldadvci->mode_spec_init (); + + return CAM_ERR_NONE; +} + +cam_err_t +cam_set_zoom (int fd, float zoom) +{ + cam_err_t ret = CAM_ERR_NONE; + unsigned int zoomval; + + /* Map 1.0 - 10.0 to 1 - 64 */ + zoomval = (unsigned int)((zoom - 1.0) * 63.0 / 9) + 1; + + if (zoomval < 1 ) + zoomval = 1; + if (zoomval > 64) + zoomval = 64; + + ret = cam_driver_set_zoom (fd, zoomval); + cam_driver_dbg ("%s\n set zoom to %u, return %d\n", __func__, zoomval, ret); + + return ret; +} + + +/* start: Called by v4l2camerasrc at startup (before STREAMON). 
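/* [Editor's note, not part of the patch] The zoom mapping in cam_set_zoom()
 * above is linear with truncation toward zero; the helper below merely restates
 * that formula so a few sample points can be checked (hypothetical name, for
 * illustration only): */
static unsigned int
map_zoom_to_driver_units (float zoom)
{
  unsigned int zoomval = (unsigned int) ((zoom - 1.0) * 63.0 / 9) + 1;

  if (zoomval < 1)
    zoomval = 1;
  if (zoomval > 64)
    zoomval = 64;
  return zoomval;
}
/* map_zoom_to_driver_units (1.0)  == 1
 * map_zoom_to_driver_units (5.5)  == 32    (4.5 * 7 = 31.5, truncated to 31, + 1)
 * map_zoom_to_driver_units (10.0) == 64 */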
Driver should + * start processing the frames now. + */ +cam_err_t +cam_driver_start (int fd) +{ + cam_err_t ret = CAM_ERR_NONE; + cam_driver_dbg ("%s\n", __func__); + + return ret; +} + +/* Called by v4l2camerasrc when stopping (before STREAMOFF). Driver + * must stop processing the frames. + */ +cam_err_t +cam_driver_stop (int fd) +{ + cam_err_t ret = CAM_ERR_NONE; + cam_driver_dbg ("%s\n", __func__); + return ret; +} + +cam_err_t +cam_set_autoexposure (int fd, int on) +{ + enum v4l2_exposure_auto_type expo; + cam_err_t ret = CAM_ERR_NONE; + if (on) + expo = V4L2_EXPOSURE_AUTO; + else + expo = V4L2_EXPOSURE_MANUAL; + cam_driver_set_autoexposure (fd, expo); + return ret; +} + +/* Start or stop autofocusing. + */ +cam_err_t +cam_set_autofocus (int on) +{ + cam_driver_dbg ("%s\n", __func__); + if (on) + mfld_driver.start_still_af = 1; + else + mfld_driver.start_still_af = 0; + + return CAM_ERR_NONE; +} + +/* Turn on / off the flash. + */ +cam_err_t +cam_set_flash (int fd, int on) +{ + cam_driver_dbg ("%s\n", __func__); + cam_err_t ret = CAM_ERR_NONE; + mfld_driver.enable_torch = on; + if( cam_driver_set_torch(fd, on)== CAM_ERR_NONE) + mfld_driver.enable_torch = on; + else + mfld_driver.enable_torch = FALSE; + return ret; +} + +/* Ask the suggested flash intensity value. + */ +cam_err_t +cam_get_flash_intensity (int fd, int on) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* set_flash_intensity: Set flash intensity value. + */ +cam_err_t +cam_set_flash_intensity (int fd, int on) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* set_flash_duration: Set the flash timeout value. + */ +cam_err_t +cam_set_flash_duration (int fd, int duration) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* Turn on / off privacy light. + */ +cam_err_t +cam_set_privacy_light (int fd, int on) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* V4l2camerasrc uses this to query autofocusing status. + */ +int +cam_checkfocus_status (cam_focus_status_t * status, int force_update) +{ + advci_af_status af_status; + gboolean do_read = FALSE; + + if (mfld_driver.sensor_type != SENSOR_TYPE_RAW) { + return 0; + } + + if (mfld_driver.focus_done >= FOCUS_REALLY_DONE) { + mfld_driver.focus_done = 0; + do_read = TRUE; + } + if ((do_read == TRUE) || (force_update == 1)) { + af_status = mfldadvci->AfGetStillAfStatus(); + *status = cam_find_item_new (focus_status_map, af_status, 1); + cam_driver_dbg ("%s *status : %d\n ", __func__, *status); + return 1; + } + return 0; +} + +/* V4l2camerasrc uses this to query the risk for image shaking. + */ +cam_err_t +cam_get_shake_risk (int fd, int risk) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* start_capture: Called by v4l2camerasrc when the HQ-image capture process starts */ +cam_err_t +cam_start_capture (int fd) +{ + cam_err_t ret = CAM_ERR_NONE; + cam_driver_dbg ("%s\n", __func__); +#if 0 + if (mfld_driver.width <= 640 && mfld_driver.height <= 480) + ret = ci_isp_switch_binary (fd, IspModePreview); + else + ret = ci_isp_switch_binary (fd, IspModePrimary); + if (ret) + return FALSE; +#endif + return ret; +} + +/* @stop_capture: Called by v4l2camerasrc when the HQ-image capture has + * finished.*/ +cam_err_t +cam_stop_capture (int fd) +{ + cam_driver_dbg ("%s\n", __func__); + return CAM_ERR_NONE; +} + +/* capture: If driver implements this vmethod, v4l2camerasrc's built-in HQ image + * capture mechamism is skipped and this function is used to capture + * the HQ image. 
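/* [Editor's note, not part of the patch] A minimal caller-side sketch of the
 * autofocus polling contract implemented by cam_checkfocus_status() above:
 * the function returns 1 only when a fresh result is available (or when
 * force_update is 1, which bypasses the FOCUS_REALLY_DONE hysteresis). */
cam_focus_status_t fstat;

if (cam_checkfocus_status (&fstat, 0)) {
  if (fstat == CAM_FOCUS_STATUS_SUCCESS) {
    /* focus locked: notify the application */
  } else if (fstat == CAM_FOCUS_STATUS_FAIL) {
    /* focus failed: notify the application */
  }
}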
+ * In MFLD implementation, we do the redeye reduction and DIS here + */ +#if 0 +static gboolean +cam_driver_capture (GstCameraSrc * camerasrc, GstBuffer ** buf, + gpointer user_data) +{ + cam_driver_dbg ("%s\n", __func__); + //GstV4l2NewCamSrc *v4l2camsrc = (GstV4l2NewCamSrc *)user_data; + + if (mfld_driver.rer_enabled == 1 || mfld_driver.dis_enabled == 1) { + //mfld_capture_frames(buf, v4l2camsrc); + return CAM_ERR_NONE; + } + + return FALSE; +} +#endif + +/* process: Perform some image improvements for the given buffer. This + * vmethod is called after @stop_capture vmethod. + */ +cam_err_t +cam_post_process (int fd) +{ + cam_err_t ret = CAM_ERR_NONE; + cam_driver_dbg ("%s\n", __func__); + + return ret; +} + +/* get_makernote; Retrieve MakerNote data chunk from camera device. + */ +cam_err_t +cam_get_makernote (int fd, unsigned char *buf, unsigned size) +{ + cam_driver_dbg ("%s, !!!!!!line:%d\n", __func__, __LINE__); + cam_err_t ret = CAM_ERR_NONE; + + ret = cam_driver_get_makernote (fd, buf, size); + + return ret; +} + +cam_err_t +cam_set_capture_correction (int fd, cam_capture_correction_t mode, int on) +{ + cam_err_t ret; + unsigned int capture_correction; + + cam_feature_get (fd, CAM_CAPTURE_CORRECTION, &capture_correction); + + if (on) + set_bit (mode, &capture_correction); + else + clear_bit (mode, &capture_correction); + + ret = cam_feature_set (fd, CAM_CAPTURE_CORRECTION, capture_correction); + return ret; +} + +cam_err_t +cam_set_tone_control (int fd, cam_tone_control_t tone, + struct tone_control * value) +{ + cam_err_t ret = CAM_ERR_NONE; + switch (tone) { + case CAM_GAMMA_VALUE: + if (mfld_cam_settings.tone.gamma != value->gamma) { + ret = cam_driver_set_gamma (fd, value->gamma); + mfld_cam_settings.tone.gamma = value->gamma; + } + break; + case CAM_BRIGHTNESS_VALUE: + if (mfld_cam_settings.tone.brightness != value->brightness) { + ret = cam_driver_set_contrast (fd, mfld_cam_settings.tone.contrast, + value->brightness); + mfld_cam_settings.tone.brightness = value->brightness; + } + break; + case CAM_CONTRAST_VALUE: + if (mfld_cam_settings.tone.contrast != value->contrast) { + ret = cam_driver_set_contrast (fd, value->contrast, + mfld_cam_settings.tone.brightness); + mfld_cam_settings.tone.contrast = value->contrast; + } + break; + default: + cam_driver_dbg ("Unsupported tone control mode\n"); + return CAM_ERR_UNSUPP; + } + return ret; +} + +cam_err_t +cam_get_tone_control (int fd, struct tone_control * tone) +{ + *tone = mfld_cam_settings.tone; + return CAM_ERR_NONE; +} + +static const char *photo_features [] = +{ + "awb_mode", + "color tone mode", + "scene mode", + "flash mode", + "exposure", + "aperture", + "ev_compensation", + "iso value", + "focus mode", + "flicker mode", + "noise reduction mode", + "capture correction", +}; + +/* write_settings: Called when some settings in v4l2camerasrc's #GstPhotoSettings + * structure has changed. Driver needs to forward the settings to device. 
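/* [Editor's note, not part of the patch] How a caller is expected to drive the
 * feature API defined below; a minimal sketch, with values taken from the enums
 * in mfld_cam.h and error handling omitted ('fd' is an already-opened video
 * device descriptor): */
cam_feature_set (fd, CAM_AWB_MODE, CAM_AWB_MODE_CLOUDY);
cam_feature_set (fd, CAM_EXPOSURE_COMPENSATION, 15);   /* tenths of EV: +1.5 EV */
cam_feature_set (fd, CAM_FOCUS_MODE, CAM_FOCUS_MODE_MACRO);

int wb_mode;
cam_feature_get (fd, CAM_AWB_MODE, &wb_mode);          /* read the mode back from the 3A state */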
+ */ +cam_err_t +cam_feature_set (int fd, cam_photo_feature_t feature, int value) +{ + cam_err_t ret = CAM_ERR_NONE; + int bias; + + cam_driver_dbg ("%s: feature %s, value %d\n", __func__, + photo_features[feature], value); + switch (feature) { + case CAM_AWB_MODE: + ret = set_wb_mode (value); + break; + case CAM_GENERAL_EFFECT_TYPE: + ret = set_tone_mode (fd, value); + break; + case CAM_GENERAL_SCENE_MODE: + ret = set_scene_mode (value); + break; + case CAM_LIGHT_FLASH_MODE: + ret = set_flash_mode (fd, value); + break; + case CAM_EXPOSURE_MANUAL_TIME: + ret = cam_driver_set_exposure (fd, value); + break; + case CAM_EXPOSURE_MANUAL_APERTURE: + ret = cam_driver_set_aperture (fd, value); + break; + case CAM_EXPOSURE_COMPENSATION: + if (value > 20) + value = 20; + if (value < -20) + value = -20; + bias = (int) ((double) value / 10 * 65536.0); + ret = mfldadvci->AeSetBias (bias); + break; + case CAM_EXPOSURE_ISO_VALUE: + ret = set_iso_speed (value); + break; + case CAM_FOCUS_MODE: + ret = set_focus_mode (value); + break; + case CAM_GENERAL_FLICKER_REDUCTION_MODE: + ret = set_flicker_mode (value); + break; + case CAM_NOISE_REDUCTION_MODE: + ret = set_noise_reduction (fd, value); + break; + case CAM_CAPTURE_CORRECTION: + ret = set_capture_correction (fd, value); + break; + default: + cam_driver_dbg ("Not supported photography features\n"); + break; + } + + if (ret) + cam_err_print (ret); + + return ret; +} + +/* read_settings: V4l2camerasrc tells the driver to retrieve settings from + * device and store them into #GstPhotoSettings structure. + */ +cam_err_t +cam_feature_get (int fd, cam_photo_feature_t feature, int *value) +{ + int bias; + cam_err_t ret; + + cam_driver_dbg ("%s: feature %s, \n", __func__, + photo_features[feature]); + if (value == NULL) { + cam_driver_dbg ("%s value is NULL\n", __func__); + return CAM_ERR_PARAM; + } + + switch (feature) { + case CAM_AWB_MODE: + ret = get_wb_mode (value); + break; + case CAM_GENERAL_EFFECT_TYPE: + ret = get_tone_mode (fd, value); + break; + case CAM_GENERAL_SCENE_MODE: + ret = get_scene_mode(value); + break; + case CAM_LIGHT_FLASH_MODE: + ret = get_flash_mode (value); + break; + case CAM_EXPOSURE_MANUAL_TIME: + ret = cam_driver_get_exposure (fd, value); + break; + case CAM_EXPOSURE_MANUAL_APERTURE: + ret = cam_driver_get_aperture (fd, value); + break; + case CAM_EXPOSURE_COMPENSATION: + ret = mfldadvci->AeGetBias (&bias); + *value = (int) (((double) bias / 65536.0) * 10); + break; + case CAM_EXPOSURE_ISO_VALUE: + ret = get_iso_speed (value); + break; + case CAM_FOCUS_MODE: + ret = get_focus_mode (value); + break; + case CAM_GENERAL_FLICKER_REDUCTION_MODE: + ret = get_flicker_mode (value); + break; + case CAM_NOISE_REDUCTION_MODE: + ret = get_noise_reduction (value); + break; + case CAM_CAPTURE_CORRECTION: + ret = get_capture_correction (value); + break; + default: + cam_driver_dbg ("Not supported photography features\n"); + ret = CAM_ERR_UNSUPP; + break; + } + if (ret) + cam_err_print (ret); + + return ret; +} + +/* init: Initialize the driver. V4l2camerasrc calls this immediately after + * the camera device has been opened and registers the device file + * descriptor to the driver. 
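/* [Editor's note, not part of the patch] Two of the conversions used above are
 * easier to follow with numbers; both lines restate arithmetic already present
 * in cam_feature_set() and set_iso_speed():
 *
 *   EV compensation: the public value is in tenths of EV, clamped to +/-20,
 *   and passed to AeSetBias() in 16.16 fixed point:
 *       value =  15  ->  bias = (int)( 1.5 * 65536.0) =   98304   (+1.5 EV)
 *       value = -20  ->  bias = (int)(-2.0 * 65536.0) = -131072   (-2.0 EV)
 *
 *   ISO: set_iso_speed() converts the arithmetic ISO number to an APEX-style
 *   speed value, sv = log2(iso / 3.125):
 *       iso = 100  ->  sv = log2(32)  = 5.0
 *       iso = 800  ->  sv = log2(256) = 8.0
 */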
+ */ + +/* Do the basic init here */ +void +libmfld_cam_init (GstV4l2MFLDAdvCI * advci) +{ + /* Initialize the driver structure */ + cam_driver_dbg ("%s\n", __func__); + + if (sem_init(&g_sem_3a, 0, 0) < 0) { + cam_driver_dbg("Init g_sem_3a failed\n"); + } + + memset (&mfld_cam_settings, 0, sizeof (mfld_cam_settings)); + + /* These modes are on by default */ + mfld_cam_settings.noise_reduction = + CAM_NOISE_REDUCTION_YCC | CAM_NOISE_REDUCTION_BAYER; + mfld_cam_settings.capture_correction = + (1 << CAM_CAPTURE_CORRECTION_EE) | (1 << CAM_CAPTURE_CORRECTION_SC) | + (1 << CAM_CAPTURE_CORRECTION_BPD); + + mfld_cam_settings.zoom = 1.0; //Zoom i in feature set 2 + mfld_cam_settings.ev_compensation = 0.0; + mfld_cam_settings.exposure = 0; // Auto + mfld_cam_settings.aperture = 0; + mfld_cam_settings.iso_speed = 0; + + mfld_cam_settings.tone.gamma = 2.2; + mfld_cam_settings.tone.brightness = 0; + mfld_cam_settings.tone.contrast = 256; + + mfld_cam_settings.wb_mode = CAM_AWB_MODE_AUTO; + mfld_cam_settings.scene_mode = CAM_GENERAL_SCENE_MODE_AUTO; + mfld_cam_settings.flash_mode = CAM_LIGHT_FLASH_MODE_AUTO; + mfld_cam_settings.tone_mode = CAM_GENERAL_EFFECT_TYPE_NORMAL; + mfld_cam_settings.flicker_mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ; + mfld_cam_settings.focus_mode = CAM_FOCUS_MODE_AUTO; + + mfld_driver.ae_enabled = 0; + mfld_driver.af_enabled = 0; + mfld_driver.awb_enabled = 0; + mfld_driver.dis_enabled = 0; + mfld_driver.dvs_enabled = 0; + mfld_driver.rer_enabled = 0; + mfld_driver.start_still_af = 0; + mfld_driver.still_af_count = 0; + + mfld_driver.first_frame = 1; + + mfld_driver.mmap = 1; + mfld_driver.g_3a_started = 0; + + mfld_driver.dvs_vector.x = 0; + mfld_driver.dvs_vector.y = 0; + mfld_driver.focus_done = 0; + mfld_driver.flash_used = FALSE; + mfld_driver.enable_torch = FALSE; + mfldadvci = advci; + + /* Initalize the 3A library */ + mfldadvci->init (); +} + +void +libmfld_cam_dispose (void) +{ + if (mfld_driver.g_3a_started) { + pthread_cancel (mfld_driver.thread); + mfld_driver.thread = 0; + } + mfld_driver.g_3a_started = 0; + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) + mfldadvci->uninit (); + return; +} + +cam_err_t +cam_driver_init (int fd, const char *sensor_id) +{ + cam_driver_dbg ("%s\n", __func__); + + mfld_driver.fd = fd; + + if (mfldadvci->isp_set_fd (sensor_id, fd, NULL) == 0){ + mfld_driver.sensor_type = SENSOR_TYPE_RAW; + + mfldadvci->AwbEnable(TRUE); + mfldadvci->AwbLock(FALSE); + mfldadvci->AeEnable(TRUE); + mfldadvci->AeLock(FALSE); + mfldadvci->AfEnable(TRUE); + mfldadvci->AfLock(FALSE); + + mfld_driver.window.x_left = 0; + mfld_driver.window.x_right = 0, + mfld_driver.window.y_top = 0; + mfld_driver.window.y_bottom = 0; + mfld_driver.window.weight = 0; + mfldadvci->AeSetWindow(&mfld_driver.window); + + mfldadvci->AeSetExposureProgram(advci_ae_exposure_program_auto); + mfldadvci->AeSetMode(advci_ae_mode_auto); + mfldadvci->AeSetMeteringMode(advci_ae_metering_mode_center); + mfldadvci->AfSetRange (advci_af_range_full); + mfldadvci->AwbSetMode(advci_awb_mode_auto); + mfldadvci->AfSetMode(advci_af_mode_auto); + mfldadvci->AfSetMeteringMode (advci_af_metering_mode_auto); + } + else + mfld_driver.sensor_type = SENSOR_TYPE_SOC; + + cam_driver_init_gamma (fd); + + /* FixMe Need to get the ISO speed , apture when use need to read these + * settings + */ + mfld_driver.initflag = 1; /* We are ready to call the dz_zoom */ + /* Do other driver related init here */ + + mfld_driver.thread = 0; + mfld_driver.frame_rate = 15.0f; + + return CAM_ERR_NONE; +} + +/* deinit: 
Deinitialize the driver. V4l2camerasrc tells the driver to stop + * using the camera device, because after this call it is not valid anymore. + */ +cam_err_t +cam_driver_deinit (int fd) +{ + cam_driver_dbg ("%s\n", __func__); + /* Initialize the driver structure */ + //GstCameraSrcClass *camera_class = GST_CAMERA_SRC_GET_CLASS (camerasrc); + mfld_driver.initflag = 0; + if (mfld_driver.g_3a_started) { + pthread_cancel (mfld_driver.thread); + mfld_driver.g_3a_started = 0; + } + /* Do other driver related deinit here */ + led_flash_off(fd); + return CAM_ERR_NONE; +} + +cam_err_t +cam_set_frame_rate( float frame_rate) +{ + mfld_driver.frame_rate = frame_rate; + return CAM_ERR_NONE; +} + +/* + * For Red Eye Reduction and Still Image Stabilization + */ +static void +cam_copy_v4l2buf_to_user (advci_frame_t *user, struct v4l2_buffer *buf) +{ + void *addr; + if (mfld_driver.mmap) { + addr = mmap (NULL, buf->length, PROT_READ | PROT_WRITE, MAP_SHARED, + mfld_driver.fd, buf->m.offset); + if (MAP_FAILED == addr) { + cam_driver_dbg ("Mapped failed\n"); + return; + } + } else { + addr = (void *) buf->m.userptr; + } + + user->length = buf->length; + user->addr = malloc (buf->length); + if (user->addr == NULL) { + cam_driver_dbg ("Malloc buf->lenth %d failed\n", buf->length); + if (mfld_driver.mmap) + munmap (addr, buf->length); + return; + } + memcpy (user->addr, addr, buf->length); + if (mfld_driver.mmap) + munmap (addr, buf->length); +} + +static void +cam_free_bufs (advci_frame_t *bufs) +{ + int i; + for (i = 0; i < DIS_COMPOSITE_FRAMES; i++) { + if (bufs[i].addr != NULL) + free (bufs[i].addr); + bufs[i].addr = NULL; + bufs[i].length = 0; + } +} + +static void +cam_still_compose (struct v4l2_buffer *buf, advci_frame_t bufs[], + int frame_dis, struct atomisp_dis_vector vectors[]) +{ + advci_frame_t com_buf; + + cam_driver_dbg ("Still Compose Phase 1: Begin\n"); + com_buf.length = buf->length; + + if (mfld_driver.mmap) + com_buf.addr = mmap (NULL, buf->length, PROT_READ | PROT_WRITE, MAP_SHARED, + mfld_driver.fd, buf->m.offset); + else + com_buf.addr = (void *) buf->m.userptr; + com_buf.width = mfld_driver.width; + com_buf.height = mfld_driver.height; + + if (mfld_driver.mmap) + munmap (com_buf.addr, com_buf.length); +} + +static void +cam_do_redeye_removal (struct v4l2_buffer *buf) +{ + + + advci_frame_t user_buf; + user_buf.length = buf->length; + if (mfld_driver.mmap) + user_buf.addr = mmap (NULL, buf->length, PROT_READ | PROT_WRITE, MAP_SHARED, + mfld_driver.fd, buf->m.offset); + else + user_buf.addr = (void *) buf->m.userptr; + user_buf.width = mfld_driver.width; + user_buf.height = mfld_driver.height; + mfldadvci->do_redeye_removal (&user_buf); + + if (mfld_driver.mmap) + munmap (user_buf.addr, user_buf.length); +} + +static void +write_image(const void *data, const int size) +{ + char filename[50]; + static unsigned int count = 0; + size_t bytes; + FILE *fp; + + snprintf(filename, 50, "dump_image_%d_%d_00%u.%s", mfld_driver.width, mfld_driver.height, + count, "yuv"); + + fp = fopen (filename, "w+"); + if (fp == NULL) { + cam_driver_dbg ("open file %s failed %s\n", filename, strerror (errno)); + exit (0); + } + + cam_driver_dbg ("Begin write image %s\n", filename); + if ((bytes = fwrite (data, size, 1, fp)) < size) + cam_driver_dbg ("Write less bytes to %s: %d, %d\n", filename, + size, bytes); + count++; + + fclose (fp); +} + +static void +dump_v4l2_buffer(struct v4l2_buffer *buffer) +{ + void *data; + if (mfld_driver.mmap) + data = mmap (NULL, buffer->length, PROT_READ | PROT_WRITE, MAP_SHARED, + 
mfld_driver.fd, buffer->m.offset); + else + data = (void *) buffer->m.userptr; + + write_image(data, buffer->length); + + if (mfld_driver.mmap) + munmap(data, buffer->length); +} + + +#define PAGE_ALIGN(x) ((x + 0xfff) & 0xfffff000) +static void +dump_raw_image (int fd, struct cam_capture_settings *st) +{ + unsigned int bytes; + char *buf; + char *filename; + struct buffer raw_buffer; + static unsigned int fn_count = 0; + FILE *fp; + char fn_buf[200]; + + filename = "./"; + + cam_driver_dbg ("%s, output_size %d\n", __func__, st->raw_output_size); + + raw_buffer.length = st->raw_output_size; + raw_buffer.start = mmap (NULL /* start anywhere */ , + PAGE_ALIGN (st->raw_output_size), PROT_READ | PROT_WRITE /* required */ , + MAP_SHARED /* recommended */ , + fd, 0xfffff000); + if (MAP_FAILED == raw_buffer.start) { + cam_driver_dbg ("mmap raw image failed"); + return; + } + cam_driver_dbg ("MMAP raw address from kernel 0x%x\n", raw_buffer.start); + buf = (char *) raw_buffer.start; + + memset (fn_buf, 0, sizeof (char) * 100); + snprintf (fn_buf, 200, "%s/%d_%d_%u.%s", filename, mfld_driver.width, + mfld_driver.height, fn_count++, "rawoutput.raw"); + fp = fopen (fn_buf, "w+"); + if (fp == NULL) { + cam_driver_dbg ("open file %s failed %s\n", fn_buf, strerror (errno)); + exit (0); + } + + cam_driver_dbg ("The size of raw image %d\n", st->raw_output_size); + + if ((bytes = fwrite (buf, st->raw_output_size, 1, fp)) < st->raw_output_size) + cam_driver_dbg ("Write less bytes to %s: %d, %d\n", fn_buf, + st->raw_output_size, bytes); + + fclose (fp); +} + +static gpointer +mfldcam_3a_process (gpointer data) +{ + for (;;) { + sem_wait(&g_sem_3a); + /* Read 3A statistics */ + if ((mfld_driver.ae_enabled || mfld_driver.af_enabled + || mfld_driver.awb_enabled || mfld_driver.dis_enabled + || mfld_driver.dvs_enabled) && (mfld_driver.sensor_type == SENSOR_TYPE_RAW) ) + { + /* AE, AWB and AF Process */ + mfldadvci->AeAfAwb_process(TRUE,&mfld_driver.timestamp); + + + } else + return NULL; + } +} + +void +mfldcam_3a_start (void) +{ + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW){ + if (!mfld_driver.g_3a_started) { + mfld_driver.g_3a_started = 1; + if (pthread_create (&mfld_driver.thread, NULL, mfldcam_3a_process, + NULL) != 0) { + mfld_driver.g_3a_started = 0; + cam_driver_dbg ("Create thread failed %s\n", __func__);; + } + } + } +} + +void +mfldcam_3a_stop (void) +{ + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW){ + if (mfld_driver.g_3a_started) + pthread_cancel (mfld_driver.thread); + mfld_driver.g_3a_started = 0; + } +} + +static cam_err_t +run_flash_sequence (int fd, struct v4l2_buffer *buffer) +{ + // non flash + led_flash_off (fd); + mfldadvci->process_flash(advci_flash_stage_none); + if (-1 == ioctl (fd, VIDIOC_DQBUF, buffer)) { + cam_driver_dbg ("%s: Error dqbuf %d\n", __func__, 0); + return CAM_ERR_SYS; + } + // pre flash + mfldadvci->process_flash (advci_flash_stage_pre); + led_flash_trigger (fd, 0, 0); + if (-1 == ioctl (fd, VIDIOC_QBUF, buffer)) { + cam_driver_dbg ("%s: Error qbuf %d\n", __func__, 1); + return CAM_ERR_SYS; + } + if (-1 == ioctl (fd, VIDIOC_DQBUF, buffer)) { + cam_driver_dbg ("%s: Error dqbuf %d\n", __func__, 1); + return CAM_ERR_SYS; + } + // main flash + mfldadvci->process_flash (advci_flash_stage_main); + led_flash_trigger (fd, 0, 15); + if (-1 == ioctl (fd, VIDIOC_QBUF, buffer)) { + cam_driver_dbg ("%s: Error qbuf %d\n", __func__, 2); + return CAM_ERR_SYS; + } + if (-1 == ioctl (fd, VIDIOC_DQBUF, buffer)) { + cam_driver_dbg ("%s: Error dqbuf %d\n", __func__, 2); + return 
CAM_ERR_SYS; + } + mfld_driver.flash_used = TRUE; + return CAM_ERR_NONE; +} + +/* Discard the first two frames because of the first frame + * from the driver is black. */ +static cam_err_t +discard_first_frame(int fd, struct v4l2_buffer *buffer, int count) +{ + int i; + for (i = 0; i < count; i++) { + if (-1 == ioctl (fd, VIDIOC_DQBUF, buffer)) { + cam_driver_dbg ("%s: Error dqbuf %d\n", __func__, count); + return CAM_ERR_SYS; + } + if (-1 == ioctl (fd, VIDIOC_QBUF, buffer)) { + cam_driver_dbg ("%s: Error qbuf %d\n", __func__, count); + return CAM_ERR_SYS; + } + } + return CAM_ERR_NONE; +} + +static void +cam_capture_init (int fd, struct v4l2_buffer *buffer, + struct cam_capture_settings *capture_settings) +{ + gboolean need_assist = FALSE; + mfld_driver.mmap = capture_settings->mmap; + mfld_driver.ae_enabled = capture_settings->ae; + mfld_driver.af_enabled = capture_settings->af; + mfld_driver.awb_enabled = capture_settings->awb; + + /* Discard the first two frames */ + if (mfld_driver.first_frame) { + discard_first_frame(fd, buffer, 2); + mfld_driver.first_frame = 0; + } + /* Still AF start */ + if ((mfld_driver.start_still_af) && (mfld_driver.sensor_type == SENSOR_TYPE_RAW)) { + if (mfld_driver.af_enabled && mfld_driver.still_af_count > 0) + mfldadvci->af_stop (); + mfld_driver.focus_done = 0; + mfld_driver.start_still_af = 0; + mfld_driver.still_af_count = 1; + mfld_driver.af_start_time = mfld_driver.timestamp; + mfldadvci->af_assist_light_needed(&need_assist); + if (need_assist) { + mfld_driver.enable_torch = TRUE; + cam_driver_set_torch(fd, TRUE); + } + if (mfld_driver.af_enabled) { + mfldadvci->af_start (); + if(mfld_cam_settings.focus_mode == CAM_FOCUS_MODE_TOUCH_AUTO) { + mfldadvci->AfSetWindows (1, &mfld_driver.window ); + mfldadvci->AeSetWindow(&mfld_driver.window ); + } + } + } +} + +static gboolean +get_flash_status(void) +{ + int cur_flash_mode = CAM_LIGHT_FLASH_MODE_OFF; + gboolean flash_en = FALSE; + + get_flash_mode(&cur_flash_mode); + + switch (cur_flash_mode) { + case CAM_LIGHT_FLASH_MODE_ON: + flash_en = TRUE; + break; + case CAM_LIGHT_FLASH_MODE_AUTO: + case CAM_LIGHT_FLASH_MODE_RED_EYE: + mfldadvci->ae_is_flash_needed(&flash_en); + + break; + default: + /* other mode, turn flash off */ + ; + } + return flash_en; +} + +static void +cam_dis_dvs_processing (struct atomisp_dis_vector *vectors, advci_frame_t *bufs, + struct v4l2_buffer *buffer, int frame_cnt, int dis_enabled) +{ + struct atomisp_dis_vector *dvs_vector; + + cam_driver_dbg ("Begin cam_dis_get_statices\n"); + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + + cam_driver_dbg ("Begin cam_dis_still_process\n"); + + if (mfld_driver.capture_mode == CAM_CAPTURE_MODE_VIDEO_RECORD) + dvs_vector = &mfld_driver.dvs_vector; + else + dvs_vector = NULL; + // TODO !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!1 + // mfldadvci->dis_process (dvs_vector); /* dis_vector only valid in video mode */ + + if (dis_enabled) { + // mfldadvci->dis_calc_still (&vectors[frame_cnt], frame_cnt); + cam_copy_v4l2buf_to_user (&bufs[frame_cnt], buffer); + } + + cam_driver_dbg ("Update DIS results\n"); + mfldadvci->update_dis_results(); + } +} + +static gboolean cam_af_is_complete(void) +{ + cam_driver_dbg("%s:%d\n", __func__, __LINE__); + + advci_af_status status; + gboolean completed = FALSE; + + status = mfldadvci->AfGetStillAfStatus(); + + if (status == advci_af_status_error) { + cam_driver_dbg("==== still AF failed \n"); + completed = TRUE; + } + else if (status == advci_af_status_success) 
{ + cam_driver_dbg("==== still AF success\n"); + completed = TRUE; + } else if (status == advci_af_status_cancelled) { + cam_driver_dbg("==== still AF cancelled \n"); + completed = TRUE; + } + else { + cam_driver_dbg("==== still AF continue %d \n", status); + } + + return completed; +} + +static cam_err_t +run_normal_sequence(int fd, struct v4l2_buffer *buffer) +{ + int frame_cnt, frame_dis; + struct atomisp_dis_vector vectors[DIS_COMPOSITE_FRAMES]; + advci_frame_t bufs[DIS_COMPOSITE_FRAMES]; + + + int dis_enabled = mfld_driver.dis_enabled && + (mfld_driver.fourcc == V4L2_PIX_FMT_YUV420); + + for (frame_cnt = 0; frame_cnt < DIS_COMPOSITE_FRAMES; frame_cnt++) { + bufs[frame_cnt].addr = NULL; + bufs[frame_cnt].width = mfld_driver.width; + bufs[frame_cnt].height = mfld_driver.height; + } + + if (dis_enabled) + frame_dis = DIS_COMPOSITE_FRAMES; + else + frame_dis = 1; + + for (frame_cnt = 0; frame_cnt < frame_dis; frame_cnt++) { + if (-1 == ioctl (fd, VIDIOC_DQBUF, buffer)) { + cam_driver_dbg ("%s: Error dqbuf %d\n", __func__, frame_cnt); + return CAM_ERR_SYS; + } + + mfld_driver.timestamp = buffer->timestamp; + sem_post(&g_sem_3a); + + /* DIS and DVS processing */ + if (dis_enabled || mfld_driver.dvs_enabled) + cam_dis_dvs_processing(vectors, bufs, buffer, frame_cnt, dis_enabled); + + /* Qbuffer for the DIS */ + if (frame_cnt >= 1) + if (-1 == ioctl (fd, VIDIOC_QBUF, buffer)) { + cam_driver_dbg ("%s: Error qbuf %d\n", __func__, frame_cnt); + return CAM_ERR_SYS; + } + } + + if (dis_enabled) { + cam_driver_dbg ("Do the still image compose\n"); + cam_still_compose (buffer, bufs, frame_dis, vectors); + } + + cam_free_bufs (bufs); + + /* Still AF processing */ + if (mfld_driver.still_af_count) { + gboolean complete = TRUE; /* AF status */ + + if (mfld_driver.af_enabled) + complete = cam_af_is_complete(); + + if (complete || + (calculate_timediff(&mfld_driver.af_start_time, &mfld_driver.timestamp) + > STILL_AF_MAX_TIME_IN_MS) ) { + mfld_driver.focus_done ++; + if(mfld_driver.focus_done >= FOCUS_REALLY_DONE) { + mfld_driver.still_af_count = 0; + if (complete == 0) { + mfldadvci->af_stop(); + cam_driver_dbg ("AF: Focus Failed %s\n", __func__); + } + mfld_driver.enable_torch= FALSE; + cam_driver_set_torch(fd, FALSE); + } + else + mfld_driver.still_af_count++; + } else + mfld_driver.still_af_count++; + } + mfld_driver.flash_used = FALSE; + return CAM_ERR_NONE; +} + +cam_err_t +cam_capture_frames (int fd, struct v4l2_buffer *buffer, + struct cam_capture_settings *capture_settings) +{ + int index; + int frame_dis = 1, frame_cnt; + cam_err_t ret = 0; + gboolean flash_en = FALSE; + + cam_capture_init (fd, buffer, capture_settings); + + if ((mfld_driver.capture_mode == CAM_CAPTURE_MODE_STILL_CAPTURE ) && + (mfld_driver.sensor_type == SENSOR_TYPE_RAW) ) { + /* check flash here not in always when capturing frames */ + flash_en = get_flash_status (); + // stop still AF processing if running + if (mfld_driver.still_af_count) { + mfldadvci->af_stop(); + if(mfld_driver.enable_torch) { + mfld_driver.enable_torch= FALSE; + cam_driver_set_torch(fd, FALSE); + } + mfld_driver.still_af_count = 0; + } + if ((flash_en == TRUE) && (mfld_driver.enable_torch == FALSE)){ + run_flash_sequence (fd, buffer); + led_flash_off(fd); + } + else{ + //cam_driver_set_indication_intensity(fd, INDICATOR_INTENSITY); + //cam_driver_set_flash_mode(fd,ATOMISP_FLASH_MODE_INDICATOR); + ret = run_normal_sequence(fd, buffer); + /* restore flash mode */ + if (mfld_driver.enable_torch == FALSE) + 
cam_driver_set_flash_mode(fd,ATOMISP_FLASH_MODE_OFF); + } + } else { + ret = run_normal_sequence(fd, buffer); + } + if (ret) + return ret; + + + if (mfld_driver.rer_enabled && (mfld_driver.fourcc == V4L2_PIX_FMT_YUV420)) + cam_do_redeye_removal (buffer); + + if (capture_settings->dump_raw) + dump_raw_image (fd, capture_settings); + + if (capture_settings->dump_image) + dump_v4l2_buffer(buffer); + + return CAM_ERR_NONE; +} + +cam_err_t get_ae_windows_num(int fd, int * num) +{ + *num = 1; + + return CAM_ERR_NONE; +} + +void get_awb_version(int fd, int * major, int * minor) +{ + mfldadvci->AwbVersion(major, minor); +} + +void get_ae_version(int fd, int * major, int * minor) +{ + mfldadvci->AeVersion(major, minor); +} + +void get_af_version(int fd, int * major, int * minor) +{ + mfldadvci->AfVersion(major, minor); +} + +cam_err_t cam_get_focus_posi(int fd, unsigned * posi) +{ + return cam_driver_get_focus_posi(fd, posi); +} + +void cam_set_af_ae_window(advci_window *window) +{ + mfldadvci->AfSetWindows (1, window); + mfldadvci->AeSetWindow(window); + mfld_driver.window = *window; +} + +void get_af_result(int fd, unsigned * result) +{ + *result = mfld_driver.af_result; // 0 means fail, it will be filled when captured finished. +} + +cam_err_t get_af_status(int fd, unsigned * status) +{ + advci_af_status st; + + + st = mfldadvci->AfGetStillAfStatus(); + + if(advci_af_status_success == st) + *status = 1; + else + *status = 0; + + return CAM_ERR_NONE; +} + +cam_err_t get_exp_mode(int fd, cam_scene_mode_t *expmode) +{ + advci_ae_exposure_program mode; + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + + mfldadvci->AeGetExposureProgram(&mode); + + /* Convert AE flicker mode to camera general flicker mode */ + switch (mode) { + case advci_ae_exposure_program_auto: + *expmode = CAM_GENERAL_SCENE_MODE_AUTO; + break; + case advci_ae_exposure_program_portrait: + *expmode = CAM_GENERAL_SCENE_MODE_PORTRAIT; + break; + case advci_ae_exposure_program_sports: + *expmode = CAM_GENERAL_SCENE_MODE_SPORT; + break; + case advci_ae_exposure_program_landscape: + *expmode = CAM_GENERAL_SCENE_MODE_LANDSCAPE; + break; + case advci_ae_exposure_program_night: + *expmode = CAM_GENERAL_SCENE_MODE_NIGHT; + break; + case advci_ae_exposure_program_fireworks: + *expmode = CAM_GENERAL_SCENE_MODE_FIREWORKS; + break; + default: + cam_driver_dbg ("Not supported exposure mode\n"); + return CAM_ERR_UNSUPP; + } + } + else { + *expmode = CAM_GENERAL_SCENE_MODE_AUTO; + } + + return CAM_ERR_NONE; +} + +void get_ae_bias(int fd, int * bias) +{ + mfldadvci->AeGetBias(bias); +} + +void get_rer_status(int fd, int * status) +{ + *status = mfld_driver.rer_enabled; +} + +void get_rer_version(int fd, int * major, int * minor) +{ + mfldadvci->RerVersion(major, minor); +} + +cam_err_t get_ae_manual_shutter(int fd, float * time) +{ + if(ci_adv_Success != mfldadvci->AeGetManualShutter(time)) + return CAM_ERR_3A; + + return CAM_ERR_NONE; +} + +cam_err_t get_ae_manual_aperture(int fd, int *aperture) +{ + + *aperture = mfldadvci->AeGetManualAperture(); + + return CAM_ERR_NONE; +} + +cam_err_t get_ae_flash_mode(int fd, cam_flash_mode_t *mode) +{ + cam_flash_mode_t ci_mode; + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + ci_mode = mfldadvci->AeGetFlashMode(); + switch(ci_mode) + { + case advci_ae_flash_mode_auto: + *mode = CAM_LIGHT_FLASH_MODE_AUTO; + break; + case advci_ae_flash_mode_off: + *mode = CAM_LIGHT_FLASH_MODE_OFF; + break; + case advci_ae_flash_mode_on: + *mode = CAM_LIGHT_FLASH_MODE_ON; + break; + case 
advci_ae_flash_mode_day_sync: + *mode = CAM_LIGHT_FLASH_MODE_AUTO; + break; + case advci_ae_flash_mode_slow_sync: + *mode = CAM_LIGHT_FLASH_MODE_AUTO; + break; + default: + *mode = CAM_LIGHT_FLASH_MODE_AUTO; + return CAM_ERR_3A; + } + } + else + *mode = CAM_LIGHT_FLASH_MODE_OFF; + + return CAM_ERR_NONE; +} +cam_err_t set_ae_flash_mode(int fd, cam_flash_mode_t mode) +{ + advci_ae_flash_mode flash_mode; + + if (mfld_driver.sensor_type == SENSOR_TYPE_RAW) { + switch(mode) + { + case CAM_LIGHT_FLASH_MODE_AUTO: + flash_mode = advci_ae_flash_mode_auto; + break; + case CAM_LIGHT_FLASH_MODE_OFF: + flash_mode = advci_ae_flash_mode_off; + break; + case CAM_LIGHT_FLASH_MODE_ON: + flash_mode = advci_ae_flash_mode_on; + break; + default: + flash_mode = advci_ae_flash_mode_auto; + break; + } + + mfldadvci->AeSetFlashMode(flash_mode); + } + + return CAM_ERR_NONE; +} + + +cam_err_t get_awb_manual_color_temperature(int fd, int *ctemp) +{ + mfldadvci->AwbGetManualColorTemperature(ctemp); + return CAM_ERR_NONE; +} + + +struct sh_css_grid_info_maker_note { + /* 3A statistics grid: */ + unsigned int s3a_width; + unsigned int s3a_height; + unsigned int s3a_bqs_per_grid_cell; + /* DIS grid: */ + unsigned int dis_width; /* also used for vertical projections */ + unsigned int dis_height; /* also used for horizontal projections */ + unsigned int dis_bqs_per_grid_cell; + unsigned int dis_hor_coef_num; + unsigned int dis_ver_coef_num; +}; + + +gboolean cam_is_flash_used() +{ + return mfld_driver.flash_used; +} + diff --git a/gst-libs/atomisphal/mfld_cam.h b/gst-libs/atomisphal/mfld_cam.h new file mode 100644 index 0000000..240f5ad --- /dev/null +++ b/gst-libs/atomisphal/mfld_cam.h @@ -0,0 +1,308 @@ +/* Gstreamer MFLD camera source abstract Layer API + * Copyright (c) 2010 Intel Corporation + + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ +#ifndef _MFLD_CAM_H_ +#define _MFLD_CAM_H_ + +#include "mfld_cam_defs.h" +#include "mfld_driver.h" +#include "gstv4l2mfldadvci.h" +#include + +#define V4L2_COLORFX_BW 1 +#define V4L2_COLORFX_SKY_BLUE 6 + +/* init: Initialize the driver. V4l2camerasrc calls this immediately after + * the camera device has been opened and registers the device file + * descriptor to the driver. 
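The entry points declared in this header combine into a fairly fixed call order. The sketch below is illustrative only and not part of the patch: it assumes the caller has already opened the ISP video node (the device path and sensor id shown are hypothetical), that V4L2 buffer negotiation and queueing happen elsewhere (as they do in the GStreamer source element), and most return values are ignored to keep it short.

/* Illustrative sketch: typical libmfldcam call order for one still capture.
 * Buffer setup (REQBUFS/QBUF/STREAMON) is assumed to be done by the caller. */
#include <fcntl.h>
#include <unistd.h>
#include <linux/videodev2.h>
#include "mfld_cam.h"

static cam_err_t example_still_capture(void)
{
    struct v4l2_buffer buf = { 0 };
    struct cam_capture_settings settings = { 0 };
    int fd = open("/dev/video0", O_RDWR);            /* hypothetical device node */

    if (fd < 0)
        return CAM_ERR_NOT_OPEN;

    cam_driver_init(fd, "example-sensor");           /* sensor id is platform specific */
    cam_set_capture_mode(fd, CAM_CAPTURE_MODE_STILL_CAPTURE);
    cam_set_capture_fmt(fd, 1920, 1080, V4L2_PIX_FMT_YUV420);

    cam_start_capture(fd);
    cam_capture_frames(fd, &buf, &settings);         /* dequeues and post-processes internally */
    cam_stop_capture(fd);

    cam_driver_deinit(fd);
    close(fd);
    return CAM_ERR_NONE;
}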
+ */ + +/* Support the photography configure + * Capture Correction has removed from the photography interface + * We also implement it here ane export it as the camsrc property + * */ + +typedef enum +{ + CAM_AWB_MODE, /* wb_mode */ + CAM_GENERAL_EFFECT_TYPE, /* Tone Mode */ + CAM_GENERAL_SCENE_MODE, /* Tone Mode */ + CAM_LIGHT_FLASH_MODE, /* Flash Mode */ + CAM_EXPOSURE_MANUAL_TIME, /* Exposure value */ + CAM_EXPOSURE_MANUAL_APERTURE, /* Aperture Value */ + CAM_EXPOSURE_COMPENSATION, /* ev_compensation */ + CAM_EXPOSURE_ISO_VALUE, /* ISO Speed */ + CAM_FOCUS_MODE, /* Focus Mode */ + CAM_GENERAL_FLICKER_REDUCTION_MODE, /* flicker mode */ + CAM_NOISE_REDUCTION_MODE, /* Noise reduction */ + CAM_CAPTURE_CORRECTION, /* Capture Correction */ +} cam_photo_feature_t; + +typedef enum +{ + CAM_GAMMA_VALUE, /* Gamma Setting */ + CAM_CONTRAST_VALUE, /* Contrast Setting */ + CAM_BRIGHTNESS_VALUE /* Brightness Setting */ +} cam_tone_control_t; + +/* Supported white balance mode by our 3A library */ +typedef enum +{ + CAM_AWB_MODE_AUTO, + CAM_AWB_MODE_DAYLIGHT, + CAM_AWB_MODE_SUNSET, + CAM_AWB_MODE_CLOUDY, + CAM_AWB_MODE_TUNGSTEN, + CAM_AWB_MODE_FLUORESCENT +} cam_awb_mode_t; + +/* Color effect in the V4L2 */ +typedef enum +{ + CAM_GENERAL_EFFECT_TYPE_NORMAL = V4L2_COLORFX_NONE, + CAM_GENERAL_EFFECT_TYPE_GRAYSCALE = V4L2_COLORFX_BW, + CAM_GENERAL_EFFECT_TYPE_SEPIA , + CAM_GENERAL_EFFECT_TYPE_NEGATIVE , + CAM_GENERAL_EFFECT_TYPE_SKY_BLUE = V4L2_COLORFX_SKY_BLUE, + CAM_GENERAL_EFFECT_TYPE_GRASS_GREEN , + CAM_GENERAL_EFFECT_TYPE_SKIN_WHITEN , + CAM_GENERAL_EFFECT_TYPE_VIVID, +} cam_general_effect_t; + +/* Scence Mode are not supported by MFLD camera currently */ +typedef enum +{ + CAM_GENERAL_SCENE_MODE_CLOSEUP = 1000, + CAM_GENERAL_SCENE_MODE_PORTRAIT, + CAM_GENERAL_SCENE_MODE_LANDSCAPE, + CAM_GENERAL_SCENE_MODE_SPORT, + CAM_GENERAL_SCENE_MODE_NIGHT, + CAM_GENERAL_SCENE_MODE_AUTO, + CAM_GENERAL_SCENE_MODE_FIREWORKS, +} cam_scene_mode_t; + +/* Focus Status */ +typedef enum +{ + CAM_FOCUS_STATUS_IDLE, + CAM_FOCUS_STATUS_SUCCESS, + CAM_FOCUS_STATUS_RUNNING, + CAM_FOCUS_STATUS_FAIL +} cam_focus_status_t; + +/* capture mode */ +typedef enum +{ + CAM_CAPTURE_MODE_VIEWFINDER, + CAM_CAPTURE_MODE_STILL_CAPTURE, + CAM_CAPTURE_MODE_VIDEO_RECORD +} cam_capture_mode_t; + +typedef enum +{ + CAM_LIGHT_FLASH_MODE_AUTO, + CAM_LIGHT_FLASH_MODE_OFF, + CAM_LIGHT_FLASH_MODE_ON, + CAM_LIGHT_FLASH_MODE_FILL_IN, + CAM_LIGHT_FLASH_MODE_RED_EYE +} cam_flash_mode_t; + +typedef enum +{ + CAM_EXPOSURE_ISO_VALUE_1600, + CAM_EXPOSURE_ISO_VALUE_800, + CAM_EXPOSURE_ISO_VALUE_400, + CAM_EXPOSURE_ISO_VALUE_200, + CAM_EXPOSURE_ISO_VALUE_100, + CAM_EXPOSURE_ISO_VALUE_AUTO +} cam_exposure_iso_value_t; + + +typedef enum +{ + CAM_FOCUS_MODE_AUTO, /* GST_PHOTOGRAPHY_FOCUS_MODE_AUTO = 0 */ + CAM_FOCUS_MODE_MACRO, + CAM_FOCUS_MODE_FULL, + CAM_FOCUS_MODE_NORM, + CAM_FOCUS_MODE_CONTINUOUS, + CAM_FOCUS_MODE_TOUCH_AUTO, +} cam_focus_mode_t; + +typedef enum +{ + CAM_VIEWFINDER_MODE_VIEWFINDER, + CAM_VIEWFINDER_MODE_STILL_CAPTURE, + CAM_VIEWFINDER_MODE_VIDEO_RECORD +} cam_viewfinder_mode_t; + +/* Supported noise reduction mode + * These mode can be combiled together */ +typedef enum +{ + CAM_NOISE_REDUCTION_BAYER, + CAM_NOISE_REDUCTION_YCC, + CAM_NOISE_REDUCTION_TEMPORAL, + CAM_NOISE_REDUCTION_FIXED_PATTERN, + CAM_NOISE_REDUCTION_EXTRA, +} cam_noise_reduction_t; + +/* Know as advanced mode before. + * RER, Image Stable and GDC are post processing. 
+ * Others are on the fly processing + */ +typedef enum +{ + CAM_CAPTURE_CORRECTION_GDC, /* Geometry Distortion Correction */ + CAM_CAPTURE_CORRECTION_CAC, /* Chromatic Aberration Correction */ + CAM_CAPTURE_CORRECTION_EE, /* Edge Enhancement, increates the sharpness of the image */ + CAM_CAPTURE_CORRECTION_SC, /* Shading Correction */ + CAM_CAPTURE_CORRECTION_BLC, /* Black Level Compensation */ + CAM_CAPTURE_CORRECTION_BPD, /* Bad Pixel Detection */ + CAM_CAPTURE_CORRECTION_RER, /* Red Eye reduction */ + CAM_CAPTURE_CORRECTION_DIS, /* Still Image Stabilization */ + CAM_CAPTURE_CORRECTION_DVS /* Video Stabilization */ +} cam_capture_correction_t; + +/* Flicker mode supported on SH 3A library */ +typedef enum +{ + CAM_GENERAL_FLICKER_REDUCTION_MODE_OFF, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_OFF = 0 */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_50HZ */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_60HZ, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_60HZ */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_AUTO, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_AUTO */ +} cam_flicker_reduction_mode_t; + +/* noise filter setting names */ +#define EENF_LNF_STRENGTH "luma-noise-filter-strength" +#define EENF_CNF_STRENGTH "chroma-noise-filter-strength" + +struct tone_control +{ + float gamma; + int contrast, brightness; +}; + +/* Camera Interal Status saved for upper layer */ +struct mfld_cam_settings_t +{ + int wb_mode; + int tone_mode; + int scene_mode; + int flash_mode; + int exposure, aperture; + int ev_compensation; + int iso_speed; + struct tone_control tone; + int flicker_mode, focus_mode; + int noise_reduction, capture_correction; + float zoom; +}; + +struct cam_capture_settings +{ + int mmap; + int ae, af, awb; + int dump_raw; + unsigned int raw_output_size; + int dump_image; +}; + +cam_err_t cam_driver_init (int fd, const char *sensor_id); + +cam_err_t cam_driver_deinit (int fd); + +/* Known as read_settings before: mfldcamsrc tells the driver to retrieve settings from + * device and store them into #GstPhotoSettings structure. + */ +cam_err_t cam_feature_get (int fd, cam_photo_feature_t feature, int *value); + +/* Known as write_settings before: Called when some settings in mfldcamsrc's #GstPhotoSettings + * structure has changed. Driver needs to forward the settings to device. 
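As a concrete illustration of the read_settings/write_settings pair described above, the fragment below sets one photography feature and reads it back. It is a sketch only, assuming a file descriptor already initialised through cam_driver_init(); the mismatch check and its use of CAM_ERR_INTERNAL are illustrative choices, not behaviour defined by this header.

/* Sketch: push one photography setting and read back what the driver applied.
 * Feature ids and values come from cam_photo_feature_t and cam_awb_mode_t. */
static cam_err_t example_set_daylight_wb(int fd)
{
    int applied = 0;
    cam_err_t err;

    err = cam_feature_set(fd, CAM_AWB_MODE, CAM_AWB_MODE_DAYLIGHT);
    if (err != CAM_ERR_NONE)
        return err;

    err = cam_feature_get(fd, CAM_AWB_MODE, &applied);
    if (err == CAM_ERR_NONE && applied != CAM_AWB_MODE_DAYLIGHT)
        err = CAM_ERR_INTERNAL;      /* device rejected or remapped the mode */

    return err;
}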
+ */ +cam_err_t cam_feature_set (int fd, cam_photo_feature_t feature, int value); + +cam_err_t cam_set_capture_correction(int fd, cam_capture_correction_t mode, int on); + +cam_err_t cam_set_tone_control(int fd, cam_tone_control_t tone, struct tone_control *value); + +cam_err_t cam_set_autoexposure (int fd, int on); + +cam_err_t cam_set_autofocus (int on); + +cam_err_t cam_set_frame_rate( float frame_rate); + +int cam_checkfocus_status (cam_focus_status_t * status, int force_update); + +cam_err_t cam_get_shake_risk (int fd, int risk); + +cam_err_t cam_set_flash (int fd, int on); + +cam_err_t cam_get_flash_intensity (int fd, int on); + +cam_err_t cam_set_flash_intensity (int fd, int on); + +cam_err_t cam_set_capture_mode (int fd, cam_capture_mode_t mode); + +cam_err_t cam_set_capture_fmt(int fd, unsigned int width,unsigned int height, + unsigned int fourcc); + +cam_err_t cam_post_process(int fd); + +cam_err_t cam_get_makernote (int fd, unsigned char *buf, unsigned size); + +cam_err_t cam_start_capture(int fd); + +cam_err_t cam_stop_capture(int fd); + +cam_err_t cam_capture_frames(int fd, struct v4l2_buffer *buffer, + struct cam_capture_settings *capture_settings); + +cam_err_t cam_set_zoom(int fd, float zoom); + +void mfldcam_3a_start (void); +void mfldcam_3a_stop (void); + +void libmfld_cam_init (GstV4l2MFLDAdvCI *advci); +void libmfld_cam_dispose (void); + + +cam_err_t get_ae_windows_num(int fd, int * num); +void get_awb_version(int fd, int * major, int * minor); +void get_ae_version(int fd, int * major, int * minor); +void get_rer_status(int fd, int * status); +void get_rer_version(int fd, int * major, int * minor); +void get_af_version(int fd, int * major, int * minor); +cam_err_t cam_get_focus_posi(int fd, unsigned * posi); +void get_af_result(int fd, unsigned * result); +cam_err_t get_af_status(int fd, unsigned * status); +void cam_set_af_ae_window(advci_window *window); + +cam_err_t get_exp_mode(int fd, cam_scene_mode_t *expmode); +void get_ae_bias(int fd, int * bias); +cam_err_t get_ae_manual_shutter(int fd, float *time); +cam_err_t get_ae_manual_aperture(int fd, int *aperture); +cam_err_t get_ae_flash_mode(int fd, cam_flash_mode_t *mode); +cam_err_t set_ae_flash_mode(int fd, cam_flash_mode_t mode); + +int cam_find_item_new (const int table[][2], const int item, int reverse); + +cam_err_t get_awb_manual_color_temperature(int fd, int *ctemp); +cam_err_t get_wb_mode (int *wb_mode); +gboolean cam_is_flash_used(void); + +#endif diff --git a/gst-libs/atomisphal/mfld_cam_defs.h b/gst-libs/atomisphal/mfld_cam_defs.h new file mode 100644 index 0000000..7bf41c5 --- /dev/null +++ b/gst-libs/atomisphal/mfld_cam_defs.h @@ -0,0 +1,36 @@ +/* Gstreamer MFLD camera source abstract Layer API + * Copyright (c) 2010 Intel Corporation + + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ +#ifndef _MFLD_CAM_DEFS_H +#define _MFLD_CAM_DEFS_H +typedef enum +{ + CAM_ERR_NONE, + CAM_ERR_PARAM, + CAM_ERR_UNSUPP, + CAM_ERR_HW, + CAM_ERR_NOT_OPEN, + CAM_ERR_SYS, + CAM_ERR_LEXIT, + CAM_ERR_DEPRECATED, + CAM_ERR_INVALID_STATE, + CAM_ERR_INTERNAL, + CAM_ERR_3A +} cam_err_t; + +#endif diff --git a/gst-libs/atomisphal/mfld_driver.c b/gst-libs/atomisphal/mfld_driver.c new file mode 100644 index 0000000..bbffd18 --- /dev/null +++ b/gst-libs/atomisphal/mfld_driver.c @@ -0,0 +1,788 @@ +/* Gstreamer MFLD camera source abstract Layer API + * Copyright (c) 2010 Intel Corporation + + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/* + * This file provide the IOCTL wrap to the Medfield v4l2 drivers + * It provides the following features. + * Image stablization + * Video stablization + * Skin tone detection/correction FIXME + * Image effect (Color Sapce Convertion) + * Noise Reduction (XNR, TNR, BNR, YNR FPN) + * Color Enhance Ment + * Edege Enhancement + * False Color Correction + * MACC + * Bad Pixel detection + * Lens shading correction + * black level + * + * digital zoom + * gamma + * tone control FIXME + * YUV scaler FIXME + * CAC/GDC + * super impose FIXME + * + * Bayer Scaling ? + * + */ +#include +#include +#include +#include +#include +#include +#include "mfld_driver.h" +#include "sh_css_types.h" +#include "atomisp_v4l2.h" +#include "atomisp.h" +#define CAM_ISP_IS_OPEN(fd) (fd > 0) + +static int +xioctl (int fd, int request, void *arg, const char *name) +{ + int ret; + + cam_driver_dbg ("ioctl %s ", name); + + do { + ret = ioctl (fd, request, arg); + } while (-1 == ret && EINTR == errno); + + if (ret < 0) + cam_driver_dbg ("failed: %s\n", strerror (errno)); + else + cam_driver_dbg ("ok\n"); + + return ret; +} + +/* Utilities for debug message and error message output + * */ +void +cam_driver_dbg (const char *format, ...) 
+{ + va_list ap; + const char *env; + if ((env = getenv ("LIBMFLDCAM_DEBUG")) && strstr (env, "verbose")) { + va_start (ap, format); + vfprintf (stdout, format, ap); + va_end (ap); + } +} + +static const char *cameralib_error_map[] = { + "CAM_ERR_NONE", + "CAM_ERR_PARAM", + "CAM_ERR_UNSUPP", + "CAM_ERR_HW", + "CAM_ERR_NOT_OPEN", + "CAM_ERR_SYS", + "CAM_ERR_LEXIT", + "CAM_ERR_DEPRECATED", + "CAM_ERR_INVALID_STATE", + "CAM_ERR_INTERNAL", + "CAM_ERR_3A" +}; + +void +cam_err_print (cam_err_t err) +{ + if ((err < CAM_ERR_NONE) || (err > CAM_ERR_3A)) { + cam_driver_dbg (" %s Wrong error number in lib camera\n", __func__); + return; + } + + cam_driver_dbg ("%s\n", cameralib_error_map[err]); +} + +/****************************************************** + * cam_driver_get_attribute(): + * try to get the value of one specific attribute + * return value: CAM_ERR_NONE for success + * others are errors + ******************************************************/ +cam_err_t +cam_driver_get_attribute (int fd, int attribute_num, int *value, char *name) +{ + struct v4l2_control control; + + cam_driver_dbg ("getting value of attribute %d: %s\n", attribute_num, name); + + if (!CAM_ISP_IS_OPEN (fd)) + return CAM_ERR_NOT_OPEN; + + control.id = attribute_num; + + if (ioctl (fd, VIDIOC_G_CTRL, &control) < 0) + goto ctrl_failed1; + + *value = control.value; + + return CAM_ERR_NONE; + +ctrl_failed1: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_USER; + controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + + if (ioctl (fd, VIDIOC_G_EXT_CTRLS, &controls) < 0) + goto ctrl_failed2; + + *value = control.value; + + return CAM_ERR_NONE; + + } + +ctrl_failed2: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_CAMERA; + controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + + if (ioctl (fd, VIDIOC_G_EXT_CTRLS, &controls) < 0) + goto ctrl_failed3; + + *value = control.value; + + return CAM_ERR_NONE; + + } + + /* ERRORS */ +ctrl_failed3: + { + cam_driver_dbg ("Failed to get value for control %d on device '%d'.", + attribute_num, fd); + return CAM_ERR_SYS; + } +} + +/****************************************************** + * cam_driver_set_attribute(): + * try to set the value of one specific attribute + * return value: CAM_ERR_NONE for success + * others are errors + ******************************************************/ +cam_err_t +cam_driver_set_attribute (int fd, int attribute_num, const int value, + const char *name) +{ + struct v4l2_control control; + + cam_driver_dbg ("setting value of attribute [%s] to %d\n", name, value); + + if (!CAM_ISP_IS_OPEN (fd)) + return CAM_ERR_NOT_OPEN; + + control.id = attribute_num; + control.value = value; + if (ioctl (fd, VIDIOC_S_CTRL, &control) < 0) + goto ctrl_failed1; + + return CAM_ERR_NONE; + +ctrl_failed1: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_CAMERA; + controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + control.value = value; + + if (ioctl (fd, VIDIOC_S_EXT_CTRLS, &controls) < 0) + goto ctrl_failed2; + + return CAM_ERR_NONE; + } + +ctrl_failed2: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_USER; + controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + control.value = 
value; + + if (ioctl (fd, VIDIOC_S_EXT_CTRLS, &controls) < 0) + goto ctrl_failed3; + + return CAM_ERR_NONE; + } + + /* ERRORS */ +ctrl_failed3: + { + cam_driver_dbg + ("Failed to set value %d for control %d on device '%d', %s\n.", value, + attribute_num, fd, strerror (errno)); + return CAM_ERR_SYS; + } +} + +static struct atomisp_gamma_table g_gamma_table; + +/* Gamma configuration + * Also used by extended dymanic range and tone control + */ +struct Camera_gm_config +{ + /* [gain] 1.0..2.4 Gamma value. */ + float GmVal; + int GmToe; /* [intensity] Toe position of S-curve. */ + int GmKne; /* [intensity] Knee position of S-curve */ + + /* [gain] 100%..400% Magnification factor of dynamic range + * (1.0 for normal dynamic range) */ + int GmDyr; + + /* Minimum output levels: Set to 0 for 256 full 8it level output or + * 16 for ITU.R601 16-235 output.*/ + unsigned char GmLevelMin; + /* Maximum output levels: Set to 128 for 256 full 8it level output or + * 235 for ITU.R601 16-235 output */ + unsigned char GmLevelMax; +}; + +static struct Camera_gm_config g_cfg_gm = { + .GmVal = 1.5, + .GmToe = 123, + .GmKne = 287, + .GmDyr = 256, + .GmLevelMin = 0, + .GmLevelMax = 255, +}; + +/* + Make gamma table +*/ +static void +AutoGmLut (unsigned short *pptDst, struct Camera_gm_config *cfg_gm) +{ + /* cannot use this on cirrus because of missing powf implementation */ + const double adbToe = (double) (cfg_gm->GmToe) / 1024.; // [u5.11] -> double + const double adbKnee = (double) (cfg_gm->GmKne) / 1024.; // [u5.11] -> double + const double adbDRange = (double) (cfg_gm->GmDyr) / 256.; // [u8.8] -> double + const double adbReGammaVal = 1 / (double) (cfg_gm->GmVal); // 1/GmVal : [u8.8] -> double + const double adbTmpKnee = + adbKnee / (adbDRange * adbKnee + adbDRange - adbKnee); + const double adbTmpToe = + ((1. + adbTmpKnee) * adbToe * adbKnee) / (adbDRange * (1. + + adbKnee) * adbTmpKnee); + const double adbDx = 1. / (double) 1024; /* 1024 is the gamma table size */ + double adbX = (double) 0.; + int asiCnt; + + for (asiCnt = 0; asiCnt < 1024; asiCnt++, adbX += adbDx) { + const double adbDeno = (1. + adbTmpToe) * (1. + adbTmpKnee) * adbX * adbX; + const double adbNume = (adbX + adbTmpToe) * (adbX + adbTmpKnee); + const double adbY = + (adbNume == 0.) ? 0. 
: pow (adbDeno / adbNume, adbReGammaVal); + short auiTmp = (short) ((double) 255 * adbY + 0.5); + + if (auiTmp < cfg_gm->GmLevelMin) { + auiTmp = cfg_gm->GmLevelMin; + } else if (auiTmp > cfg_gm->GmLevelMax) { + auiTmp = cfg_gm->GmLevelMax; + } + pptDst[asiCnt] = auiTmp; + } +} + +cam_err_t +cam_driver_set_fpn (int fd, int on) +{ + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_sc (int fd, int on) +{ +// TODO check this + return CAM_ERR_NONE; +} + +/* Bad Pixel Detection*/ +cam_err_t +cam_driver_set_bpd (int fd, int on) +{ + return cam_driver_set_attribute (fd, V4L2_CID_ATOMISP_BAD_PIXEL_DETECTION, + on, "Bad Pixel Detection"); +} +cam_err_t +cam_driver_get_bpd (int fd, int *on) +{ + return cam_driver_get_attribute (fd, V4L2_CID_ATOMISP_BAD_PIXEL_DETECTION, + on, "Bad Pixel Detection"); +} + +cam_err_t +cam_driver_set_bnr (int fd, int on) +{ + struct atomisp_nr_config bnr; + if (on) { + bnr.bnr_gain = 60000; + bnr.direction = 3200; + bnr.threshold_cb = 64; + bnr.threshold_cr = 64; + } else { + memset(&bnr, 0, sizeof(bnr)); + } + cam_driver_dbg("%s on:%d\n",__func__,on); + + // TODO not configured in Android + // check later status of this + //return xioctl (fd, ATOMISP_IOC_G_NR, &bnr, "Bayer NR"); + return CAM_ERR_NONE; +} + +/* False Color Correction, Demosaicing */ +cam_err_t +cam_driver_set_fcc (int fd, int on) +{ + return cam_driver_set_attribute (fd, V4L2_CID_ATOMISP_FALSE_COLOR_CORRECTION, + on, "False Color Correction"); +} + +cam_err_t +cam_driver_set_ynr (int fd, int on) +{ + /* YCC NR use the same parameter as Bayer NR */ + cam_driver_dbg("%s\n",__func__); + + return cam_driver_set_bnr(fd, on); +} + +cam_err_t +cam_driver_set_ee (int fd, int on) +{ + struct atomisp_ee_config ee; + if (on) { + ee.gain = 8192; + ee.threshold = 128; + ee.detail_gain = 2048; + } else { + ee.gain = 0; + ee.threshold = 0; + ee.detail_gain = 0; + } + cam_driver_dbg("%s on:%d\n",__func__,on); + + return xioctl (fd, ATOMISP_IOC_S_EE, &ee, "Edege Ehancement"); +} + +/*Black Level Compensation */ +cam_err_t +cam_driver_set_blc (int fd, int on) +{ + static struct atomisp_ob_config ob_off; + struct atomisp_ob_config ob_on; + static int current_status = 0; + + cam_driver_dbg("Set Black Level compensation\n"); + if (on && current_status) { + cam_driver_dbg("Black Level Compensation Already On\n"); + return CAM_ERR_NONE; + } + + if (!on && !current_status) { + cam_driver_dbg("Black Level Composition Already Off\n"); + return CAM_ERR_NONE; + } + + ob_on.mode = atomisp_ob_mode_fixed; + ob_on.level_gr = 0; + ob_on.level_r = 0; + ob_on.level_b = 0; + ob_on.level_gb = 0; + ob_on.start_position = 0; + ob_on.end_position = 63; + + cam_driver_dbg("%s on:%d\n",__func__,on); + + if (on) { + if (xioctl (fd, ATOMISP_IOC_G_BLACK_LEVEL_COMP, &ob_off, "blc") < 0 ) { + cam_driver_dbg("Error Get black level composition\n"); + return CAM_ERR_SYS; + } + if (xioctl (fd, ATOMISP_IOC_S_BLACK_LEVEL_COMP, &ob_on, "blc") < 0) { + cam_driver_dbg("Error Set black level composition\n"); + return CAM_ERR_SYS; + } + } else { + if (xioctl (fd, ATOMISP_IOC_S_BLACK_LEVEL_COMP, &ob_off, "blc") < 0) { + cam_driver_dbg("Error Set black level composition\n"); + return CAM_ERR_SYS; + } + } + current_status = on; + return CAM_ERR_NONE; +} + + +cam_err_t +cam_driver_set_tnr (int fd, int on) +{ + struct atomisp_tnr_config tnr; + cam_driver_dbg("%s on:%d\n",__func__,on); + return xioctl (fd, ATOMISP_IOC_S_TNR, &tnr, "ATOMISP_IOC_S_TNR"); +} + +cam_err_t +cam_driver_set_xnr (int fd, int on) +{ + cam_driver_dbg("%s on:%d\n",__func__,on); + return 
xioctl (fd, ATOMISP_IOC_S_XNR, &on, "ATOMISP_IOC_S_XNR"); +} + +cam_err_t +cam_driver_set_cac (int fd, int on) +{ + return cam_driver_set_attribute (fd, V4L2_CID_ATOMISP_POSTPROCESS_GDC_CAC, + on, "CAC"); +} + +/* Configure the color effect Mode in the kernel + */ + +cam_err_t +cam_driver_set_tone_mode (int fd, enum v4l2_colorfx colorfx) +{ + return cam_driver_set_attribute (fd, V4L2_CID_COLORFX, colorfx, "Color Effect"); +} + +cam_err_t +cam_driver_get_tone_mode (int fd, int *colorfx) +{ + return cam_driver_get_attribute (fd, V4L2_CID_COLORFX, colorfx, "Color Effect"); +} + +static cam_err_t +cam_driver_set_gamma_tbl (int fd, struct atomisp_gamma_table *g_tbl) +{ + int ret; + ret = xioctl (fd, ATOMISP_IOC_S_ISP_GAMMA, g_tbl, "S_GAMMA_TBL"); + if (ret < 0) + return CAM_ERR_SYS; + else + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_init_gamma (int fd) +{ + int ret; + ret = xioctl (fd, ATOMISP_IOC_G_ISP_GAMMA, &g_gamma_table, "G_GAMMA_TBL"); + if (ret < 0) + return CAM_ERR_SYS; + else + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_mipi_interrupt(int fd, int enable) +{ + int ret; + + ret = xioctl (fd, ATOMISP_IOC_S_MIPI_IRQ, &enable, "MIPI_IRQ" ); + if (ret < 0) + return CAM_ERR_SYS; + else + return CAM_ERR_NONE; +} + + +cam_err_t +cam_driver_set_gamma (int fd, float gamma) +{ + g_cfg_gm.GmVal = gamma; + AutoGmLut (g_gamma_table.data, &g_cfg_gm); + + return cam_driver_set_gamma_tbl (fd, &g_gamma_table); +} + +cam_err_t +cam_driver_set_contrast (int fd, int contrast, int brightness) +{ + int i, tmp; + for (i = 0; i < 1024; i++) { + tmp = (g_gamma_table.data[i] * contrast >> 8) + brightness; + + if (tmp < g_cfg_gm.GmLevelMin) { + tmp = g_cfg_gm.GmLevelMin; + } else if (tmp > g_cfg_gm.GmLevelMax) { + tmp = g_cfg_gm.GmLevelMax; + } + + g_gamma_table.data[i] = tmp; + } + return cam_driver_set_gamma_tbl (fd, &g_gamma_table); +} + +/* Description + * VF Scaling for View Finder + * Parameters: + * factor : scaling factor, 0..2. Power of 1/2 + * TBD + * Waiting for SH's implementation for this feature + */ +cam_err_t +cam_driver_set_vf (int fd, int factor, int updatek) +{ + cam_driver_dbg ("%s\n", __func__); +#if 0 + s_ispparm *w_ispparm = &g_ispparam->w_ispparm; + w_ispparm->vf_wind_len_x = w_ispparm->vf_wind_len_x * (factor); + w_ispparm->vf_wind_len_y = w_ispparm->vf_wind_len_y * (factor); +#endif + return CAM_ERR_NONE; +} + + +/* SuperImpose + * TBD + * Waiting for SH provide the more useful API to do the image/vide overlay. + */ +cam_err_t +cam_driver_set_si (int fd, int on) +{ + cam_driver_dbg ("%s\n", __func__); + //convert the overlay file to Y file, U file and V file + //Store the Y U V file name to sh_si_config + //superimpose_file_read((sh_si_config *) arg); + //Call the kernel to store the pattern to xmem. 
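The gamma path above can be summarised briefly: AutoGmLut() builds a 1024-entry table from Camera_gm_config, cam_driver_set_gamma() pushes it through ATOMISP_IOC_S_ISP_GAMMA, and cam_driver_set_contrast() rescales the cached table in place. The sketch below restates that rescaling step on its own; the table size and the min/max clamp mirror the code above, while the function name and parameters are illustrative.

/* Sketch of the transform cam_driver_set_contrast() applies to the cached
 * gamma LUT: out[i] = (in[i] * contrast >> 8) + brightness, then clamp. */
static void example_scale_gamma_lut(unsigned short *lut, int contrast, int brightness,
                                    unsigned char level_min, unsigned char level_max)
{
    int i, tmp;

    for (i = 0; i < 1024; i++) {
        tmp = (lut[i] * contrast >> 8) + brightness;
        if (tmp < level_min)
            tmp = level_min;
        else if (tmp > level_max)
            tmp = level_max;
        lut[i] = tmp;
    }
}

With this fixed-point convention a contrast value of 256 is unity gain, so only the brightness offset (and the clamp) changes the table.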
+ return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_gdc (int fd, int on) +{ + return cam_driver_set_attribute (fd, V4L2_CID_ATOMISP_POSTPROCESS_GDC_CAC, + on, "GDC"); +} + +cam_err_t +cam_driver_set_dvs (int fd, int on) +{ + return cam_driver_set_attribute(fd, V4L2_CID_ATOMISP_VIDEO_STABLIZATION, + on, "Video Stabilization"); +} + +cam_err_t +cam_driver_set_exposure (int fd, unsigned int exposure) +{ + if (exposure == 0) + return CAM_ERR_NONE; + return cam_driver_set_attribute (fd, V4L2_CID_EXPOSURE_ABSOLUTE, exposure, + "exposure"); +} + +cam_err_t +cam_driver_get_exposure (int fd, int *exposure) +{ + return cam_driver_get_attribute (fd, V4L2_CID_EXPOSURE_ABSOLUTE, exposure, "Exposure"); +} + +cam_err_t +cam_driver_set_aperture (int fd, unsigned int aperture) +{ +// TODO check this + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_get_aperture (int fd, int *aperture) +{ +// TODO check this + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_iso_speed (int fd, unsigned int iso_speed) +{ +// TODO check this + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_get_iso_speed (int fd, int *iso_speed) +{ + // iso speed 0 = not defined + *iso_speed = 0; + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_focus_posi (int fd, int focus) +{ + return cam_driver_set_attribute (fd, V4L2_CID_FOCUS_ABSOLUTE, focus, "Focus"); +} + +cam_err_t +cam_driver_get_focus_posi (int fd, int *focus) +{ + return cam_driver_get_attribute (fd, V4L2_CID_FOCUS_ABSOLUTE, focus, "Focus"); +} + +cam_err_t +cam_driver_set_zoom (int fd, unsigned int zoom) +{ + return cam_driver_set_attribute (fd, V4L2_CID_ZOOM_ABSOLUTE, zoom, "zoom"); +} + +cam_err_t +cam_driver_get_zoom (int fd, unsigned int *zoom) +{ + return cam_driver_get_attribute (fd, V4L2_CID_ZOOM_ABSOLUTE, zoom, "Zoom"); +} + +cam_err_t +cam_driver_set_autoexposure (int fd, enum v4l2_exposure_auto_type expo) +{ + return cam_driver_set_attribute (fd, V4L2_CID_EXPOSURE_AUTO, expo, "auto exposure"); +} + +cam_err_t +cam_driver_get_makernote (int fd, unsigned char *buf, unsigned size) +{ + int ret; + + ret = xioctl (fd, ATOMISP_IOC_ISP_MAKERNOTE, buf, "G_MAKERNOTE"); + if (ret < 0) + return CAM_ERR_SYS; + else + return CAM_ERR_NONE; +} + +cam_err_t +cam_driver_set_led_flash (int fd, int id, int value) +{ + int ret; + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_CAMERA; + controls.count = 1; + controls.controls = &control; + + control.id = id; + control.value = value; + + ret = xioctl (fd, VIDIOC_S_EXT_CTRLS, &controls, "flash settings"); + if (ret < 0) + return CAM_ERR_SYS; + else + return CAM_ERR_NONE; +} + +void +led_flash_trigger (int fd, int duration, int intensity) +{ + cam_err_t ret; + + cam_driver_dbg("%s\n",__func__); + + ret = cam_driver_set_led_flash (fd, V4L2_CID_FLASH_STROBE, 0); + if (ret != CAM_ERR_NONE) + { + cam_driver_dbg ("%s: Error flash ioctl %d\n", __func__, 0); + } + ret = cam_driver_set_led_flash (fd, V4L2_CID_FLASH_STROBE, 1); + if (ret != CAM_ERR_NONE) + { + cam_driver_dbg ("%s: Error flash ioctl %d\n", __func__, 1); + } + ret = cam_driver_set_led_flash (fd, V4L2_CID_FLASH_INTENSITY, intensity); + if (ret != CAM_ERR_NONE) + { + cam_driver_dbg ("%s: Error flash ioctl %d\n", __func__, 2); + } + ret = cam_driver_set_led_flash (fd, V4L2_CID_FLASH_TIMEOUT, duration); + if (ret != CAM_ERR_NONE) + { + cam_driver_dbg ("%s: Error flash ioctl %d\n", __func__, 3); + } +} + +void +led_flash_off (int fd) +{ + cam_driver_dbg("%s\n",__func__); + cam_driver_set_attribute(fd, 
V4L2_CID_FLASH_MODE, ATOMISP_FLASH_MODE_OFF, "Flash mode"); + cam_driver_set_attribute (fd, V4L2_CID_FLASH_INDICATOR_INTENSITY, 0, "indication intensity"); + cam_driver_set_attribute (fd, V4L2_CID_FLASH_TORCH_INTENSITY, 0, "Torch intensity"); + cam_driver_set_led_flash (fd, V4L2_CID_FLASH_STROBE, 0); + +} + +cam_err_t +cam_driver_set_flash_mode (int fd,int mode) +{ + cam_driver_dbg ("%s: mode %d\n", __func__, mode); + + return cam_driver_set_attribute (fd, V4L2_CID_FLASH_MODE, mode, "Flash Mode"); +} + +cam_err_t +cam_driver_set_indication_intensity (int fd,int intensity) +{ + cam_driver_dbg ("%s: intensity %d\n", __func__, intensity); + + return cam_driver_set_attribute (fd, V4L2_CID_FLASH_INDICATOR_INTENSITY, intensity, "indication intensity"); +} + +cam_err_t +cam_driver_set_torch(int fd, int on) +{ + cam_driver_dbg ("%s: state %d\n", __func__, on); + cam_err_t ret; + if (on) { + ret = cam_driver_set_attribute(fd, V4L2_CID_FLASH_MODE, ATOMISP_FLASH_MODE_TORCH, "Flash mode"); + if (ret == CAM_ERR_NONE) + ret = cam_driver_set_attribute (fd, V4L2_CID_FLASH_TORCH_INTENSITY, TORCH_INTENSITY, "Torch intensity"); + } + else { + ret = cam_driver_set_attribute(fd, V4L2_CID_FLASH_MODE, ATOMISP_FLASH_MODE_OFF, "Flash mode"); + if(ret == CAM_ERR_NONE) + ret = cam_driver_set_attribute (fd, V4L2_CID_FLASH_TORCH_INTENSITY, 0, "Torch intensity"); + } + cam_driver_dbg ("%s: ret %d\n", __func__, ret); + return ret; +} diff --git a/gst-libs/atomisphal/mfld_driver.h b/gst-libs/atomisphal/mfld_driver.h new file mode 100644 index 0000000..7264f8e --- /dev/null +++ b/gst-libs/atomisphal/mfld_driver.h @@ -0,0 +1,122 @@ +/* Gstreamer MFLD camera source abstract Layer API + * Copyright (c) 2010 Intel Corporation + + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ +#ifndef _MFLD_DRIVER_H +#define _MFLD_DRIVER_H +#include +#include +#include "mfld_cam_defs.h" + +#define ON 1 +#define OFF 0 + +#define INDICATOR_INTENSITY 10 /* 10% */ +#define TORCH_INTENSITY 20 /* 20% */ + + +void cam_err_print (cam_err_t err); + +/* Color effect settings */ +cam_err_t cam_driver_set_tone_mode (int fd, enum v4l2_colorfx colorfx); +cam_err_t cam_driver_get_tone_mode (int fd, int *colorfx); + +/* ********************************************************** + * Noise Reduction Part + * **********************************************************/ + +/* Fixed Pattern Noise Reduction */ +cam_err_t cam_driver_set_fpn (int fd, int on); + +/* Bayer Noise Reduction */ +cam_err_t cam_driver_set_bnr (int fd, int on); + +/* YNR (Y Noise Reduction), YEE (Y Edge Enhancement) */ +cam_err_t cam_driver_set_ynr (int fd, int on); + +/* Temporal Noise Reduction */ +cam_err_t cam_driver_set_tnr (int fd, int on); + +/* Extra Noise Reduction */ +cam_err_t cam_driver_set_xnr (int fd, int on); + +/* ********************************************************** + * Advanced Features Part + * **********************************************************/ + +/* Shading Correction */ +cam_err_t cam_driver_set_sc (int fd, int on); + +/* Bad Pixel Detection */ +cam_err_t cam_driver_set_bpd (int fd, int on); +cam_err_t cam_driver_get_bpd (int fd, int *on); + +/* False Color Correction, Demosaicing */ +cam_err_t cam_driver_set_fcc (int fd, int on); + +/* Edge Enhancement, Sharpness */ +cam_err_t cam_driver_set_ee (int fd, int on); + +/* Black Level Compensation */ +cam_err_t cam_driver_set_blc (int fd, int on); + +/* Chromatic Aberration Correction */ +cam_err_t cam_driver_set_cac (int fd, int on); + +/* GDC : Geometry Distortion Correction */ +cam_err_t cam_driver_set_gdc (int fd, int on); + +/* Exposure Value setting */ +cam_err_t cam_driver_set_exposure(int fd, unsigned int exposure); + +/* aperture settings */ +cam_err_t cam_driver_set_aperture(int fd, unsigned int aperture); +cam_err_t cam_driver_set_ev_compensation(int fd, unsigned int ev_comp); +cam_err_t cam_driver_set_iso_speed(int fd, unsigned int iso_speed); +cam_err_t cam_driver_set_focus_posi(int fd, int focus); + +cam_err_t cam_driver_set_zoom(int fd, unsigned int zoom); +cam_err_t cam_driver_set_dvs(int fd, int on); +cam_err_t cam_driver_set_autoexposure(int fd, enum v4l2_exposure_auto_type expo); + +cam_err_t cam_driver_set_gamma (int fd, float gamma); +cam_err_t cam_driver_init_gamma(int fd); +cam_err_t cam_driver_get_exposure(int fd, int *exposure); +cam_err_t cam_driver_get_iso_speed(int fd, int *iso_speed); +cam_err_t cam_driver_get_focus_posi(int fd, int *focus); + +cam_err_t cam_driver_set_contrast (int fd, int contrast, int brightness); + +void cam_driver_dbg(const char *format, ...); + +cam_err_t cam_driver_get_makernote (int fd, unsigned char *buf, unsigned size); + +cam_err_t cam_driver_set_led_flash (int fd, int id, int value); +void led_flash_trigger (int fd, int duration, int intensity); +void led_flash_off (int fd); + +cam_err_t cam_driver_get_aperture (int fd, int *aperture); + +cam_err_t cam_driver_set_flash_mode(int fd,int mode); + +cam_err_t cam_driver_set_mipi_interrupt(int fd, int enable); + +cam_err_t cam_driver_set_indication_intensity (int fd,int intensity); +cam_err_t cam_driver_set_torch(int fd, int on); + + +#endif /* _MFLD_DRIVER_H */ diff --git a/gst-libs/atomisphal/sh_css_types.h b/gst-libs/atomisphal/sh_css_types.h new file mode 100644 index 0000000..db05cc6 --- /dev/null +++ 
b/gst-libs/atomisphal/sh_css_types.h @@ -0,0 +1,798 @@ +/* +* Support for Medfield PNW Camera Imaging ISP subsystem. +* +* Copyright (c) 2010 Intel Corporation. All Rights Reserved. +* +* Copyright (c) 2010 Silicon Hive www.siliconhive.com. +* +* This program is free software; you can redistribute it and/or +* modify it under the terms of the GNU General Public License version +* 2 as published by the Free Software Foundation. +* +* This program is distributed in the hope that it will be useful, +* but WITHOUT ANY WARRANTY; without even the implied warranty of +* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +* GNU General Public License for more details. +* +* You should have received a copy of the GNU General Public License +* along with this program; if not, write to the Free Software +* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA +* 02110-1301, USA. +* +*/ + +#ifndef _SH_CSS_TYPES_H_ +#define _SH_CSS_TYPES_H_ + +/* This code is also used by Silicon Hive in a simulation environment + * Therefore, the following macro is used to differentiate when this + * code is being included from within the Linux kernel source + */ +#ifdef __KERNEL__ +#include +#include /* for memcpy */ +#else +#include /* for the print function */ +#include /* for size_t */ +#include /* for memcpy */ +#ifdef STDC99 +#include +#else +#include "sh_css_bool.h" +#endif /* STDC99 */ +#endif + +#define SH_CSS_MAJOR 0 +#define SH_CSS_MINOR 2 +#define SH_CSS_REVISION 5 + +#define SH_CSS_MACC_NUM_AXES 16 +#define SH_CSS_MACC_NUM_COEFS 4 +#define SH_CSS_MORPH_TABLE_NUM_PLANES 6 +#define SH_CSS_SC_NUM_COLORS 4 +#define SH_CSS_CTC_TABLE_SIZE 1024 +#define SH_CSS_GAMMA_TABLE_SIZE 1024 +#define SH_CSS_DIS_NUM_COEF_TYPES 6 +#define SH_CSS_DIS_COEF_TYPES_ON_DMEM 2 +#define SH_CSS_CTC_COEF_SHIFT 13 +#define SH_CSS_GAMMA_GAIN_K_SHIFT 13 + +/* Fixed point types. + * NOTE: the 16 bit fixed point types actually occupy 32 bits + * to save on extension operations in the ISP code. + */ +#define u0_16 unsigned int /* unsigned 0.16 fixed point type */ +#define u2_14 unsigned int /* unsigned 2.14 fixed point type */ +#define u5_11 unsigned int /* unsigned 5.11 fixed point type */ +#define u8_8 unsigned int /* unsigned 8.8 fixed point type */ +#define s0_15 signed int /* signed 0.15 fixed point type */ + +/* Errors, these values are used as the return value for most + functions in this API. These can be translated into a human readable + string (see below). 
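For the sh_css_err values introduced just below, a translation helper in the style of the cameralib_error_map[] table used by mfld_driver.c could look like the following sketch. The helper name is hypothetical and only a handful of codes are shown; the real translation facility that "(see below)" refers to is not part of this hunk.

/* Hypothetical helper: map a few sh_css_err values to readable strings,
 * mirroring the cameralib_error_map[] approach from mfld_driver.c. */
static const char *example_sh_css_strerror(enum sh_css_err err)
{
    switch (err) {
    case sh_css_success:                        return "success";
    case sh_css_err_internal_error:             return "internal error";
    case sh_css_err_cannot_allocate_memory:     return "cannot allocate memory";
    case sh_css_err_invalid_arguments:          return "invalid arguments";
    case sh_css_err_unsupported_configuration:  return "unsupported configuration";
    default:                                    return "unknown sh_css error";
    }
}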
*/ +enum sh_css_err { + sh_css_success, + sh_css_err_internal_error, + sh_css_err_conflicting_mipi_settings, + sh_css_err_unsupported_configuration, + sh_css_err_mode_does_not_have_viewfinder, + sh_css_err_input_resolution_not_set, + sh_css_err_unsupported_input_mode, + sh_css_err_cannot_allocate_memory, + sh_css_err_invalid_arguments, + sh_css_err_too_may_colors, + sh_css_err_overlay_frame_missing, + sh_css_err_overlay_frames_too_big, + sh_css_err_unsupported_frame_format, + sh_css_err_frames_mismatch, + sh_css_err_overlay_not_set, + sh_css_err_not_implemented, + sh_css_err_invalid_frame_format, + sh_css_err_unsupported_resolution, + sh_css_err_scaling_factor_out_of_range, + sh_css_err_cannot_obtain_shading_table, + sh_css_err_interrupt_error, + sh_css_err_unexpected_interrupt, + sh_css_err_interrupts_not_enabled, + sh_css_err_system_not_idle, + sh_css_err_unsupported_input_format, + sh_css_err_not_enough_input_lines, + sh_css_err_not_enough_input_columns, + sh_css_err_illegal_resolution, + sh_css_err_effective_input_resolution_not_set, + sh_css_err_viewfinder_resolution_too_wide, + sh_css_err_viewfinder_resolution_exceeds_output, + sh_css_err_mode_does_not_have_grid, + sh_css_err_mode_does_not_have_raw_output +}; + +/* Input modes, these enumerate all supported input modes: + * - Sensor: data from a sensor coming into the MIPI interface + * - FIFO: data from the host coming into the GP FIFO + * - TPG: data coming from the test pattern generator + * - PRBS: data coming from the Pseudo Random Bit Sequence + * - Memory: data coming from DDR + */ +enum sh_css_input_mode { + SH_CSS_INPUT_MODE_SENSOR, + SH_CSS_INPUT_MODE_FIFO, + SH_CSS_INPUT_MODE_TPG, + SH_CSS_INPUT_MODE_PRBS, + SH_CSS_INPUT_MODE_MEMORY +}; + +/* The MIPI interface can be used in two modes, one with one + * lane and one with 4 lanes. + */ +enum sh_css_mipi_port { + SH_CSS_MIPI_PORT_1LANE, + SH_CSS_MIPI_PORT_4LANE +}; + +/* The MIPI interface supports 2 types of compression or can + * be run without compression. + */ +enum sh_css_mipi_compression { + SH_CSS_MIPI_COMPRESSION_NONE, + SH_CSS_MIPI_COMPRESSION_1, + SH_CSS_MIPI_COMPRESSION_2 +}; + +/* The ISP streaming input interface supports the following formats. + * These match the corresponding MIPI formats. + */ +enum sh_css_input_format { + SH_CSS_INPUT_FORMAT_YUV420_8_LEGACY, /* 8 bits per subpixel */ + SH_CSS_INPUT_FORMAT_YUV420_8, /* 8 bits per subpixel */ + SH_CSS_INPUT_FORMAT_YUV420_10, /* 10 bits per subpixel */ + SH_CSS_INPUT_FORMAT_YUV422_8, /* UYVY..UVYV, 8 bits per subpixel */ + SH_CSS_INPUT_FORMAT_YUV422_10, /* UYVY..UVYV, 10 bits per subpixel */ + SH_CSS_INPUT_FORMAT_RGB_444, /* BGR..BGR, 4 bits per subpixel */ + SH_CSS_INPUT_FORMAT_RGB_555, /* BGR..BGR, 5 bits per subpixel */ + SH_CSS_INPUT_FORMAT_RGB_565, /* BGR..BGR, 5 bits B and $, 6 bits G */ + SH_CSS_INPUT_FORMAT_RGB_666, /* BGR..BGR, 6 bits per subpixel */ + SH_CSS_INPUT_FORMAT_RGB_888, /* BGR..BGR, 8 bits per subpixel */ + SH_CSS_INPUT_FORMAT_RAW_6, /* RAW data, 6 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_7, /* RAW data, 7 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_8, /* RAW data, 8 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_10, /* RAW data, 10 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_12, /* RAW data, 12 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_14, /* RAW data, 14 bits per pixel */ + SH_CSS_INPUT_FORMAT_RAW_16, /* RAW data, 16 bits per pixel */ + SH_CSS_INPUT_FORMAT_BINARY_8, /* Binary byte stream. 
*/ +}; + +/* Specify the capture mode, this can be RAW (simply copy sensor input to DDR), + * Primary ISP or the Advanced ISP. + */ +enum sh_css_capture_mode { + SH_CSS_CAPTURE_MODE_RAW, /* no processing, only copy input to + output, no viewfinder output */ + SH_CSS_CAPTURE_MODE_PRIMARY, /* primary ISP */ + SH_CSS_CAPTURE_MODE_ADVANCED, /* advanced ISP */ + SH_CSS_CAPTURE_MODE_LOW_LIGHT, /* low light ISP */ +}; + +/* Interrupt info enumeration. + * This lists all possible interrupts for use by the appliation layer. + * Note that the sh_css API uses some internal interrupts, these are not listed + * here. + */ +enum sh_css_interrupt_info { + /* the current frame is done and a new one can be started */ + SH_CSS_IRQ_INFO_FRAME_DONE = 1 << 0, + /* another stage (ISP binary) needs to be started. */ + SH_CSS_IRQ_INFO_START_NEXT_STAGE = 1 << 1, + /* 3A + DIS statistics are ready. */ + SH_CSS_IRQ_INFO_STATISTICS_READY = 1 << 2, + /* the css input system has encountered an error */ + SH_CSS_IRQ_INFO_INPUT_SYSTEM_ERROR = 1 << 3, + /* the input formatter in in error */ + SH_CSS_IRQ_INFO_IF_ERROR = 1 << 4, + /* the css receiver received the start of frame */ + SH_CSS_IRQ_INFO_DMA_ERROR = 1 << 5, + /* A firmware accelerator has terminated */ + SH_CSS_IRQ_INFO_FW_ACC_DONE = 1 << 6, + /* software interrupts */ + SH_CSS_IRQ_INFO_SW_0 = 1 << 7, + SH_CSS_IRQ_INFO_SW_1 = 1 << 8, +}; + + +enum sh_css_rx_irq_info { + SH_CSS_RX_IRQ_INFO_BUFFER_OVERRUN = 1 << 0, + SH_CSS_RX_IRQ_INFO_ENTER_SLEEP_MODE = 1 << 1, + SH_CSS_RX_IRQ_INFO_EXIT_SLEEP_MODE = 1 << 2, + SH_CSS_RX_IRQ_INFO_ECC_CORRECTED = 1 << 3, + SH_CSS_RX_IRQ_INFO_ERR_SOT = 1 << 4, + SH_CSS_RX_IRQ_INFO_ERR_SOT_SYNC = 1 << 5, + SH_CSS_RX_IRQ_INFO_ERR_CONTROL = 1 << 6, + SH_CSS_RX_IRQ_INFO_ERR_ECC_DOUBLE = 1 << 7, + SH_CSS_RX_IRQ_INFO_ERR_CRC = 1 << 8, + SH_CSS_RX_IRQ_INFO_ERR_UNKNOWN_ID = 1 << 9, + SH_CSS_RX_IRQ_INFO_ERR_FRAME_SYNC = 1 << 10, + SH_CSS_RX_IRQ_INFO_ERR_FRAME_DATA = 1 << 11, + SH_CSS_RX_IRQ_INFO_ERR_DATA_TIMEOUT = 1 << 12, + SH_CSS_RX_IRQ_INFO_ERR_UNKNOWN_ESC = 1 << 13, + SH_CSS_RX_IRQ_INFO_ERR_LINE_SYNC = 1 << 14, +}; + +/* Enumeration used to select whether interrupts should be used, and if so, + * whether they are edge or pulse triggered. + * If interrupts are not used, the blocking function + * sh_css_wait_for_completion() must be used. + */ +enum sh_css_interrupt_setting { + SH_CSS_INTERRUPT_SETTING_EDGE, + SH_CSS_INTERRUPT_SETTING_PULSE +}; + +/* Frame formats, some of these come from fourcc.org, others are + better explained by video4linux2. The NV11 seems to be described only + on MSDN pages, but even those seem to be gone now. + Frames can come in many forms, the main categories are RAW, RGB and YUV + (or YCbCr). The YUV frames come in 4 flavors, determined by how the U and V + values are subsampled: + 1. YUV420: hor = 2, ver = 2 + 2. YUV411: hor = 4, ver = 1 + 3. YUV422: hor = 2, ver = 1 + 4. 
YUV444: hor = 1, ver = 1 + */ +enum sh_css_frame_format { + SH_CSS_FRAME_FORMAT_NV11, /* 12 bit YUV 411, Y, UV plane */ + SH_CSS_FRAME_FORMAT_NV12, /* 12 bit YUV 420, Y, UV plane */ + SH_CSS_FRAME_FORMAT_NV16, /* 16 bit YUV 422, Y, UV plane */ + SH_CSS_FRAME_FORMAT_NV21, /* 12 bit YUV 420, Y, VU plane */ + SH_CSS_FRAME_FORMAT_NV61, /* 16 bit YUV 422, Y, VU plane */ + SH_CSS_FRAME_FORMAT_YV12, /* 12 bit YUV 420, Y, V, U plane */ + SH_CSS_FRAME_FORMAT_YV16, /* 16 bit YUV 422, Y, V, U plane */ + SH_CSS_FRAME_FORMAT_YUV420, /* 12 bit YUV 420, Y, U, V plane */ + SH_CSS_FRAME_FORMAT_YUV420_16, /* yuv420, 16 bits per subpixel */ + SH_CSS_FRAME_FORMAT_YUV422, /* 16 bit YUV 422, Y, U, V plane */ + SH_CSS_FRAME_FORMAT_YUV422_16, /* yuv422, 16 bits per subpixel */ + SH_CSS_FRAME_FORMAT_UYVY, /* 16 bit YUV 422, UYVY interleaved */ + SH_CSS_FRAME_FORMAT_YUYV, /* 16 bit YUV 422, YUYV interleaved */ + SH_CSS_FRAME_FORMAT_YUV444, /* 24 bit YUV 444, Y, U, V plane */ + SH_CSS_FRAME_FORMAT_YUV_LINE, /* Internal format, e.g. for VBF frame. + 2 y lines followed by a uv + interleaved line */ + SH_CSS_FRAME_FORMAT_RAW, /* RAW, 1 plane */ + SH_CSS_FRAME_FORMAT_RGB565, /* 16 bit RGB, 1 plane. Each 3 sub + pixels are packed into one 16 bit + value, 5 bits for R, 6 bits for G + and 5 bits for B. */ + SH_CSS_FRAME_FORMAT_PLANAR_RGB888, /* 24 bit RGB, 3 planes */ + SH_CSS_FRAME_FORMAT_RGBA888, /* 32 bit RGBA, 1 plane, A=Alpha unused + */ + SH_CSS_FRAME_FORMAT_QPLANE6, /* Internal, for advanced ISP */ + SH_CSS_FRAME_FORMAT_BINARY_8, /* byte stream, used for jpeg. For + frames of this type, we set the + height to 1 and the width to the + number of allocated bytes. */ +}; + +struct sh_css_frame_plane { + unsigned int height; /* height of a plane in lines */ + unsigned int width; /* width of a line, in DMA elements, note that + for RGB565 the three subpixels are stored in + one element. For all other formats this is + the number of subpixels per line. */ + unsigned int stride; /* stride of a line in bytes */ + void *data; /* pointer that points into frame data */ +}; + +struct sh_css_frame_binary_plane { + unsigned int size; + struct sh_css_frame_plane data; +}; + +struct sh_css_frame_yuv_planes { + struct sh_css_frame_plane y; + struct sh_css_frame_plane u; + struct sh_css_frame_plane v; +}; + +struct sh_css_frame_nv_planes { + struct sh_css_frame_plane y; + struct sh_css_frame_plane uv; +}; + +struct sh_css_frame_rgb_planes { + struct sh_css_frame_plane r; + struct sh_css_frame_plane g; + struct sh_css_frame_plane b; +}; + +struct sh_css_frame_plane6_planes { + struct sh_css_frame_plane r; + struct sh_css_frame_plane r_at_b; + struct sh_css_frame_plane gr; + struct sh_css_frame_plane gb; + struct sh_css_frame_plane b; + struct sh_css_frame_plane b_at_r; +}; + +/* For RAW input, the bayer order needs to be specified separately. There + are 4 possible orders. The name is constructed by taking the first two + colors on the first line and the first two colors from the second line. +grbg: GRGRGRGR + BGBGBGBG +rgbg: RGRGRGRG + GBGBGBGB +bggr: BGBGBGBG + GRGRGRGR +gbrg: GBGBGBGB + RGRGRGRG + */ +enum sh_css_bayer_order { + sh_css_bayer_order_grbg, + sh_css_bayer_order_rggb, + sh_css_bayer_order_bggr, + sh_css_bayer_order_gbrg +}; + +/* Frame info struct: + This structure describes a frame. It contains the resolution and strides. 
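To make the width/padded_width distinction concrete, the sketch below computes the storage needed for a planar YUV420 frame described by sh_css_frame_info (defined just below). It assumes 8 bits per sample and chroma planes at half the luma stride and height, the usual YUV420 layout; that layout is an assumption here, not something this header guarantees.

/* Sketch: byte size of a planar YUV420 frame, assuming 8-bit samples and
 * half-resolution chroma planes (an assumption, not stated by this header). */
static unsigned int example_yuv420_frame_bytes(const struct sh_css_frame_info *info)
{
    unsigned int y_bytes  = info->padded_width * info->height;
    unsigned int uv_bytes = (info->padded_width / 2) * (info->height / 2);

    return y_bytes + 2 * uv_bytes;    /* Y plane + U plane + V plane */
}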
+ */ +struct sh_css_frame_info { + /* width in valid data in pixels (not subpixels) */ + unsigned int width; + /* height in lines of valid image data */ + unsigned int height; + /* width of a line in memory, in pixels */ + unsigned int padded_width; + /* format of the data in this frame */ + enum sh_css_frame_format format; + /* number of valid bits per pixel, only valid for raw frames. */ + unsigned int raw_bit_depth; + /* bayer order of raw data, only valid for raw frames. */ + enum sh_css_bayer_order raw_bayer_order; +}; + +struct sh_css_frame { + struct sh_css_frame_info info; + /* pointer to start of image data in memory */ + void *data; + /* size of data pointer in bytes */ + unsigned int data_bytes; + /* indicate whether memory is allocated physically contiguously */ + bool contiguous; + union { + struct sh_css_frame_plane raw; + struct sh_css_frame_plane rgb; + struct sh_css_frame_rgb_planes planar_rgb; + struct sh_css_frame_plane yuyv; + struct sh_css_frame_yuv_planes yuv; + struct sh_css_frame_nv_planes nv; + struct sh_css_frame_plane6_planes plane6; + struct sh_css_frame_binary_plane binary; + } planes; +}; + +/* Histogram. This contains num_elements values of type unsigned int. + * The data pointer is a DDR pointer (virtual address). + */ +struct sh_css_histogram { + unsigned int num_elements; + void *data; +}; + +/* Overlay: + * this is the structure describing the entire overlay. + * An overlay consists of a frame (of type sh_css_frame_format_yuv420), + * the background color (yuv) and the blending ratios for the subpixels + * of the input data and the overlay data. + * All pixels in the overlay that are not equal to the background are + * overlaid, taking their blending ratio into account. The blending ratio + * should be specified between 0 and 100. + */ +struct sh_css_overlay { + /* the frame containing the overlay data The overlay frame width should + * be the multiples of 2*ISP_VEC_NELEMS. The overlay frame height + * should be the multiples of 2. + */ + struct sh_css_frame *frame; + /* Y value of overlay background */ + unsigned char bg_y; + /* U value of overlay background */ + char bg_u; + /* V value of overlay background */ + char bg_v; + /* the blending percent of input data for Y subpixels */ + unsigned char blend_input_perc_y; + /* the blending percent of input data for U subpixels */ + unsigned char blend_input_perc_u; + /* the blending percent of input data for V subpixels */ + unsigned char blend_input_perc_v; + /* the blending percent of overlay data for Y subpixels */ + unsigned char blend_overlay_perc_y; + /* the blending percent of overlay data for U subpixels */ + unsigned char blend_overlay_perc_u; + /* the blending percent of overlay data for V subpixels */ + unsigned char blend_overlay_perc_v; + /* the overlay start x pixel position on output frame It should be the + multiples of 2*ISP_VEC_NELEMS. */ + unsigned int overlay_start_x; + /* the overlay start y pixel position on output frame It should be the + multiples of 2. 
*/ + unsigned int overlay_start_y; +}; + +/* SP struct describing overlay properties */ +struct sh_css_sp_overlay { + int bg_y; + int bg_u; + int bg_v; + int blend_shift; + int blend_input_y; + int blend_input_u; + int blend_input_v; + int blend_overlay_y; + int blend_overlay_u; + int blend_overlay_v; + int overlay_width; + int overlay_height; + int overlay_start_x; + int overlay_start_y; + const char *frame_ptr_overlay_y; + const char *frame_ptr_overlay_u; + const char *frame_ptr_overlay_v; +}; + +/* structure that describes the 3A and DIS grids */ +struct sh_css_grid_info { + /* ISP input size that is visible for user */ + unsigned int isp_in_width; + unsigned int isp_in_height; + /* 3A statistics grid: */ + unsigned int s3a_width; + unsigned int s3a_height; + unsigned int s3a_bqs_per_grid_cell; + /* DIS grid: */ + unsigned int dis_width; /* also used for vertical projections */ + unsigned int dis_aligned_width; + unsigned int dis_height; /* also used for horizontal projections */ + unsigned int dis_aligned_height; + unsigned int dis_bqs_per_grid_cell; + unsigned int dis_hor_coef_num; + unsigned int dis_ver_coef_num; +}; + +enum sh_css_ob_mode { + sh_css_ob_mode_none, + sh_css_ob_mode_fixed, + sh_css_ob_mode_raster +}; + +/* Shading correction */ +enum sh_css_sc_color { + SH_CSS_SC_COLOR_GR, + SH_CSS_SC_COLOR_R, + SH_CSS_SC_COLOR_B, + SH_CSS_SC_COLOR_GB +}; + +/* White Balance (Gain Adjust) */ +struct sh_css_wb_config { + unsigned int integer_bits; + unsigned int gr; /* unsigned .<16-integer_bits> */ + unsigned int r; /* unsigned .<16-integer_bits> */ + unsigned int b; /* unsigned .<16-integer_bits> */ + unsigned int gb; /* unsigned .<16-integer_bits> */ +}; + +/* Color Space Conversion settings */ +struct sh_css_cc_config { + unsigned int fraction_bits; + int matrix[3 * 3]; /* RGB2YUV Color matrix, signed + <13-fraction_bits>. */ +}; + +/* Morphing table for advanced ISP. + * Each line of width elements takes up COORD_TABLE_EXT_WIDTH elements + * in memory. 
+ */ +struct sh_css_morph_table { + unsigned int height; + unsigned int width; /* number of valid elements per line */ + unsigned short *coordinates_x[SH_CSS_MORPH_TABLE_NUM_PLANES]; + unsigned short *coordinates_y[SH_CSS_MORPH_TABLE_NUM_PLANES]; +}; + +struct sh_css_fpn_table { + short *data; + unsigned int width; + unsigned int height; + unsigned int shift; +}; + +struct sh_css_shading_table { + /* native sensor resolution */ + unsigned int sensor_width; + unsigned int sensor_height; + /* number of data points per line per color (bayer quads) */ + unsigned int width; + /* number of lines of data points per color (bayer quads) */ + unsigned int height; + /* bits of fraction part for shading table values */ + unsigned int fraction_bits; + /* one table for each color (use sh_css_sc_color to index) */ + unsigned short *data[SH_CSS_SC_NUM_COLORS]; +}; + +struct sh_css_gamma_table { + unsigned short data[SH_CSS_GAMMA_TABLE_SIZE]; +}; + +struct sh_css_ctc_table { + unsigned short data[SH_CSS_CTC_TABLE_SIZE]; +}; + +struct sh_css_macc_table { + short data[SH_CSS_MACC_NUM_COEFS * SH_CSS_MACC_NUM_AXES]; +}; + +/* Temporal noise reduction configuration */ +struct sh_css_tnr_config { + u0_16 gain; /* [gain] Strength of NR */ + u0_16 threshold_y; /* [intensity] Motion sensitivity for Y */ + u0_16 threshold_uv; /* [intensity] Motion sensitivity for U/V */ +}; + +/* Optical black level configuration */ +struct sh_css_ob_config { + /* Obtical black level mode (Fixed / Raster) */ + enum sh_css_ob_mode mode; + /* [intensity] optical black level for GR (relevant for fixed mode) */ + u0_16 level_gr; + /* [intensity] optical black level for R (relevant for fixed mode) */ + u0_16 level_r; + /* [intensity] optical black level for B (relevant for fixed mode) */ + u0_16 level_b; + /* [intensity] optical black level for GB (relevant for fixed mode) */ + u0_16 level_gb; + /* [BQ] 0..63 start position of OB area (relevant for raster mode) */ + unsigned short start_position; + /* [BQ] start..63 end position of OB area (relevant for raster mode) */ + unsigned short end_position; +}; + +/* Defect pixel correction configuration */ +struct sh_css_dp_config { + /* [intensity] The threshold of defect Pixel Correction, representing + * the permissible difference of intensity between one pixel and its + * surrounding pixels. Smaller values result in more frequent pixel + * corrections. + */ + u0_16 threshold; + /* [gain] The sensitivity of mis-correction. ISP will miss a lot of + * defects if the value is set too large. + */ + u8_8 gain; +}; + +/* Configuration used by Bayer noise reduction and YCC noise reduction */ +struct sh_css_nr_config { + /* [gain] Strength of noise reduction for Bayer NR (Used by Bayer NR) */ + u0_16 bnr_gain; + /* [gain] Strength of noise reduction for YCC NR (Used by YCC NR) */ + u0_16 ynr_gain; + /* [intensity] Sensitivity of Edge (Used by Bayer NR) */ + u0_16 direction; + /* [intensity] coring threshold for Cb (Used by YCC NR) */ + u0_16 threshold_cb; + /* [intensity] coring threshold for Cr (Used by YCC NR) */ + u0_16 threshold_cr; +}; + +/* Edge enhancement (sharpen) configuration */ +struct sh_css_ee_config { + /* [gain] The strength of sharpness. */ + u5_11 gain; + /* [intensity] The threshold that divides noises from edge. */ + u8_8 threshold; + /* [gain] The strength of sharpness in pell-mell area. 
*/ + u5_11 detail_gain; +}; + +struct sh_css_de_config { + u0_16 pixelnoise; + u0_16 c1_coring_threshold; + u0_16 c2_coring_threshold; +}; + +struct sh_css_gc_config { + unsigned short gain_k1; + unsigned short gain_k2; +}; + +struct sh_css_anr_config { + int threshold; +}; + +struct sh_css_ce_config { + u0_16 uv_level_min; + u0_16 uv_level_max; +}; + +struct sh_css_3a_config { + u0_16 ae_y_coef_r; /* [gain] Weight of R for Y */ + u0_16 ae_y_coef_g; /* [gain] Weight of G for Y */ + u0_16 ae_y_coef_b; /* [gain] Weight of B for Y */ + u0_16 awb_lg_high_raw; /* [intensity] AWB level gate high for raw */ + u0_16 awb_lg_low; /* [intensity] AWB level gate low */ + u0_16 awb_lg_high; /* [intensity] AWB level gate high */ + s0_15 af_fir1_coef[7]; /* [factor] AF FIR coefficients of fir1 */ + s0_15 af_fir2_coef[7]; /* [factor] AF FIR coefficients of fir2 */ +}; + +/* Guard this declaration, because this struct is also defined by + * Sh3a_Types.h now + */ +#ifndef __SH_CSS_3A_OUTPUT__ +#define __SH_CSS_3A_OUTPUT__ + +/* Workaround: hivecc complains about "tag "sh_css_3a_output" already declared" + without this extra decl. */ +struct sh_css_3a_output; + +struct sh_css_3a_output { + int ae_y; + int awb_cnt; + int awb_gr; + int awb_r; + int awb_b; + int awb_gb; + int af_hpf1; + int af_hpf2; +}; + +#endif /* End of guard */ + +/* Descriptor of sp firmware blob */ +struct sh_css_sp_fw { + const void *text; /* Sp text section */ + unsigned int text_source; /* Position of text in blob */ + unsigned int text_size; /* Size of text section */ + const void *data; /* Sp data section */ + unsigned int data_source; /* Position of data in blob */ + unsigned int data_target; /* Start position of data in SP dmem */ + unsigned int data_size; /* Size of text section */ + unsigned int bss_target; /* Start position of bss in SP dmem */ + unsigned int bss_size; /* Size of bss section */ + void *dmem_init_data; /* Addr sp init data */ +}; + +/* this struct contains all arguments that can be passed to + a binary. It depends on the binary which ones are used. 
*/ +struct sh_css_binary_args { + struct sh_css_frame *cc_frame; /* continuous capture frame */ + struct sh_css_frame *in_frame; /* input frame */ + struct sh_css_frame *in_ref_frame; /* reference input frame */ + struct sh_css_frame *in_tnr_frame; /* tnr input frame */ + struct sh_css_frame *out_frame; /* output frame */ + struct sh_css_frame *out_ref_frame; /* reference output frame */ + struct sh_css_frame *out_tnr_frame; /* tnr output frame */ + struct sh_css_frame *extra_frame; /* intermediate frame */ + struct sh_css_frame *out_vf_frame; /* viewfinder output frame */ + int dvs_vector_x; + int dvs_vector_y; + bool enable_xnr; + bool two_ppc; + bool copy_vf; + bool copy_output; + unsigned vf_downscale_log2; +}; + +/* Type of acceleration */ +enum sh_css_acc_type { + SH_CSS_ACC_STANDALONE, /* Stand-alone acceleration */ + SH_CSS_ACC_OUTPUT, /* Accelerator stage on output frame */ + SH_CSS_ACC_VIEWFINDER /* Accelerator stage on viewfinder frame */ +}; + +/* Type of acceleration argument */ +enum sh_css_acc_arg_type { + SH_CSS_ACC_ARG_SCALAR_IN, /* Scalar input argument */ + SH_CSS_ACC_ARG_SCALAR_OUT, /* Scalar output argument */ + SH_CSS_ACC_ARG_SCALAR_IO, /* Scalar in/output argument */ + SH_CSS_ACC_ARG_PTR_IN, /* Pointer input argument */ + SH_CSS_ACC_ARG_PTR_OUT, /* Pointer output argument */ + SH_CSS_ACC_ARG_PTR_IO, /* Pointer in/output argument */ + SH_CSS_ACC_ARG_PTR_NOFLUSH, /* Pointer argument will not be flushed */ + SH_CSS_ACC_ARG_PTR_STABLE, /* Pointer input argument that is stable */ + SH_CSS_ACC_ARG_FRAME /* Frame argument */ +}; + +/* Descriptor for an SP argument */ +struct sh_css_sp_arg { + enum sh_css_acc_arg_type type; /* Type of SP argument */ + void *value; /* Value of SP argument */ + bool stable; /* Pointer is stable */ + unsigned int size; /* Size of SP argument */ + void *host; /* Private data used by host */ +}; + +struct sh_css_acc_fw; + +/* Firmware descriptor */ +struct sh_css_acc_fw_hdr { + enum sh_css_acc_type type; /* Type of accelerator */ + bool loaded; /* Firmware has been loaded */ + struct sh_css_sp_arg *sp_args; /* Current SP argument */ + unsigned prog_name_offset; /* offset wrt hdr in bytes */ + unsigned arg_types_offset; /* offset wrt hdr in bytes */ + unsigned sp_blob_offset; /* offset wrt hdr in bytes */ + unsigned isp_blob_offset; /* offset wrt hdr in bytes */ + struct { + unsigned int size; /* Size of sp blob */ + void (*init) (struct sh_css_acc_fw *); /* init for crun */ + void *entry; /* Address of sp entry point */ + unsigned int *args; /* Address of sp_args */ + unsigned int args_cnt; /* Number of sp_args */ + unsigned int args_size; /* Size of sp_args */ + unsigned int *css_abort; /* SP dmem abort flag */ + struct sh_css_frame *input; /* SP dmem input frame */ + struct sh_css_frame *output; /* SP dmem output frame */ + struct sh_css_frame *out_vf; /* SP dmem vf frame */ + struct sh_css_frame *extra; /* SP dmem extra frame */ + unsigned int *vf_downscale_bits; + void *isp_code; /* SP dmem address holding xmem + address of isp code */ + struct sh_css_sp_fw fw; /* SP fw descriptor */ + } sp; + struct { + unsigned int size; /* Size of isp blob */ + } isp; + /* To create a sequence of accelerators */ + struct sh_css_acc_fw *next; + /* Firmware handle between user space and kernel */ + unsigned int handle; + /* Hmm pointer of allocated SP code */ + const unsigned char *sp_code; + /* Hmm pointer of allocated ISP code */ + const unsigned char *isp_code; +}; + +/* Firmware. 
Containing header and actual blobs */ +struct sh_css_acc_fw { + /* firmware header */ + struct sh_css_acc_fw_hdr header; + /* followed by prog_name, sp arg types, sp blob and isp blob */ +#ifdef __HIVECC + unsigned char data[1]; /* Not C89 */ +#else + unsigned char data[]; +#endif +}; + +/* Access macros for firmware */ +#define SH_CSS_ACC_OFFSET(t, f, n) ((t)((unsigned char *)(f)+(f->header.n))) +#define SH_CSS_ACC_PROG_NAME(f) SH_CSS_ACC_OFFSET(const char *, f, \ + prog_name_offset) +#define SH_CSS_ACC_SP_ARGS(f) SH_CSS_ACC_OFFSET(enum sh_css_acc_arg_type*,\ + f, arg_types_offset) +#define SH_CSS_ACC_SP_CODE(f) SH_CSS_ACC_OFFSET(unsigned char *, f, \ + sp_blob_offset) +#define SH_CSS_ACC_SP_SIZE(f) ((f)->header.sp.size) +#define SH_CSS_ACC_SP_DATA(f) (SH_CSS_ACC_SP_CODE(f) + \ + (f)->header.sp.fw.data_source) +#define SH_CSS_ACC_ISP_CODE(f) SH_CSS_ACC_OFFSET(unsigned char*, f,\ + isp_blob_offset) +#define SH_CSS_ACC_ISP_SIZE(f) ((f)->header.isp.size) +#define SH_CSS_ACC_SIZE(f) ((f)->header.isp_blob_offset + \ + SH_CSS_ACC_ISP_SIZE(f)) + +/* Structure to encapsulate required arguments for + * initialization of SP DMEM using the SP itself + */ +struct sh_css_sp_init_dmem_cfg { + unsigned done; /* Init has been done */ + void *ddr_data_addr; /* data segment address in ddr */ + void *dmem_data_addr; /* data segment address in dmem */ + unsigned int data_size; /* data segment size */ + void *dmem_bss_addr; /* bss segment address in dmem */ + unsigned int bss_size; /* bss segment size */ +}; + +#endif /* _SH_CSS_TYPES_H_ */ diff --git a/gst-libs/gst/camera/Makefile.am b/gst-libs/gst/camera/Makefile.am index 5d6c0d5..687b43a 100644 --- a/gst-libs/gst/camera/Makefile.am +++ b/gst-libs/gst/camera/Makefile.am @@ -1,25 +1,27 @@ -lib_LTLIBRARIES = libgstcamera-@GST_MAJORMINOR@.la +lib_LTLIBRARIES = libgstmfldcamera-@GST_MAJORMINOR@.la -libgstcamera_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/camera +libgstmfldcamera_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/camera -libgstcamera_@GST_MAJORMINOR@_la_SOURCES = gstcamerasrc.c \ - gstcameracolorbalance.c \ - gstcameraphotoiface.c +libgstmfldcamera_@GST_MAJORMINOR@_la_SOURCES = gstmfldcamerasrc.c \ + gstmfldcameracolorbalance.c \ + gstmfldcameraphotoiface.c \ + gstmfldcameracontroliface.c -libgstcamera_@GST_MAJORMINOR@include_HEADERS = gstcamerasrc.h \ - gstcameracolorbalance.h +libgstmfldcamera_@GST_MAJORMINOR@include_HEADERS = gstmfldcamerasrc.h \ + gstmfldcameracolorbalance.h \ + gstmfldcameracontroliface.h -libgstcamera_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) -DGST_USE_UNSTABLE_API \ +libgstmfldcamera_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) -DGST_USE_UNSTABLE_API \ $(GST_PLUGINS_BASE_CFLAGS) -libgstcamera_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) \ +libgstmfldcamera_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) \ $(GST_BASE_LIBS) \ -lgstinterfaces-$(GST_MAJORMINOR) \ -lgsttag-$(GST_MAJORMINOR) \ -lgstphotography-$(GST_MAJORMINOR) -libgstcamera_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) \ +libgstmfldcamera_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) \ $(GST_ALL_LDFLAGS) \ $(GST_LT_LDFLAGS) -noinst_HEADERS = gstcameraphotoiface.h +noinst_HEADERS = gstmfldcameraphotoiface.h diff --git a/gst-libs/gst/camera/gstmfldcameracolorbalance.c b/gst-libs/gst/camera/gstmfldcameracolorbalance.c new file mode 100644 index 0000000..a245408 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameracolorbalance.c @@ -0,0 +1,130 @@ +/* GStreamer + * + * Copyright (C) 2003 Ronald Bultje + * 2006 Edgard 
Lima + * 2008-2010 Nokia Corporation + * + * gstcameracolorbalance.c: generic color balance interface implementation + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include "gstmfldcameracolorbalance.h" + +GST_BOILERPLATE (GstCameraSrcColorBalanceChannel, + gst_camerasrc_color_balance_channel, + GstColorBalanceChannel, GST_TYPE_COLOR_BALANCE_CHANNEL); + +static void +gst_camerasrc_color_balance_channel_base_init (gpointer g_class) +{ +} + +static void +gst_camerasrc_color_balance_channel_class_init + (GstCameraSrcColorBalanceChannelClass * klass) +{ +} + +static void +gst_camerasrc_color_balance_channel_init (GstCameraSrcColorBalanceChannel * + channel, GstCameraSrcColorBalanceChannelClass * klass) +{ + channel->id = (guint32) - 1; +} + +static G_GNUC_UNUSED gboolean +gst_camerasrc_color_balance_contains_channel (GstCameraSrc * camerasrc, + GstCameraSrcColorBalanceChannel * channel) +{ + const GList *item; + + for (item = camerasrc->colors; item != NULL; item = item->next) + if (item->data == channel) + return TRUE; + + return FALSE; +} + +const GList * +gst_camerasrc_color_balance_list_channels (GstCameraSrc * camerasrc) +{ + return camerasrc->colors; +} + +void +gst_camerasrc_color_balance_set_value (GstCameraSrc * camerasrc, + GstColorBalanceChannel * channel, gint value) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + GstCameraSrcColorBalanceChannel *camchannel = + GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL (channel); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + + /* assert that we're opened and that we're using a known item */ + g_return_if_fail (opened); + g_return_if_fail (gst_camerasrc_color_balance_contains_channel (camerasrc, + camchannel)); + + GST_OBJECT_LOCK (camerasrc); + switch (camchannel->id) { + case MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV: + camerasrc->photoconf.tone_mode = value; + break; + case MM_CAM_FILTER_WB_SOURCE_PRIV: + camerasrc->photoconf.wb_mode = value; + break; + default: + break; + } + GST_OBJECT_UNLOCK (camerasrc); + + bclass->set_attribute (camerasrc, camchannel->id, value); +} + +gint +gst_camerasrc_color_balance_get_value (GstCameraSrc * camerasrc, + GstColorBalanceChannel * channel) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + GstCameraSrcColorBalanceChannel *camchannel = + GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL (channel); + gint value; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + + /* assert that we're opened and that we're using a known item */ + g_return_val_if_fail (opened, 0); + g_return_val_if_fail (gst_camerasrc_color_balance_contains_channel (camerasrc, + camchannel), 0); + + if (!bclass->get_attribute (camerasrc, camchannel->id, &value)) + return 0; + + return value; +} diff 
--git a/gst-libs/gst/camera/gstmfldcameracolorbalance.h b/gst-libs/gst/camera/gstmfldcameracolorbalance.h new file mode 100644 index 0000000..2c9f9f5 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameracolorbalance.h @@ -0,0 +1,155 @@ +/* GStreamer + * + * Copyright (C) 2003 Ronald Bultje + * 2006 Edgard Lima + * 2008 Nokia Corporation + * + * gstcameracolorbalance.h: Generic color balance interface implementation + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_CAMERA_SRC_COLOR_BALANCE_H__ +#define __GST_CAMERA_SRC_COLOR_BALANCE_H__ + +#include +#include +#include + +#include "gstmfldcamerasrc.h" + +G_BEGIN_DECLS + +#define GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL \ + (gst_camerasrc_color_balance_channel_get_type ()) +#define GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL, \ + GstCameraSrcColorBalanceChannel)) +#define GST_CAMERA_SRC_COLOR_BALANCE_CHANNEL_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL, \ + GstCameraSrcColorBalanceChannelClass)) +#define GST_IS_CAMERA_SRC_COLOR_BALANCE_CHANNEL(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL)) +#define GST_IS_CAMERA_SRC_COLOR_BALANCE_CHANNEL_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL)) + + +/* Keep standard v4l2 controls supported + * add own private control id's for tizen specific controls + * note rule below for priv control ids. + * #define V4L2_CID_PRIVATE_BASE 0x08000000 + */ +#define SOURCE_PRIV_BASE V4L2_CID_PRIVATE_BASE +#define MM_CAM_FILTER_WB_SOURCE_PRIV (SOURCE_PRIV_BASE + 1) +#define MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV (SOURCE_PRIV_BASE + 2) + +#define MM_CAM_SOURCE_PRIV_LAST MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV + +static struct v4l2_queryctrl mmfw_wb_controls[] = { + { + .id = MM_CAM_FILTER_WB_SOURCE_PRIV, + .type = V4L2_CTRL_TYPE_INTEGER, + .name = "white balance", + .minimum = 0x0, + .maximum = 0xffff, + .step = 0x01, + .default_value = 0x00, + .flags = 0, + }, + { + .id = MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV, + .type = V4L2_CTRL_TYPE_INTEGER, + .name = "color tone", + .minimum = 0x0, + .maximum = 0xffff, + .step = 0x01, + .default_value = 0x00, + .flags = 0, + } +}; + +#ifndef ARRAY_SIZE +#define ARRAY_SIZE(arr) (sizeof(arr) / sizeof(arr[0])) +#endif + +#define N_MMFW_CONTROLS (ARRAY_SIZE(mmfw_wb_controls)) + + +typedef struct _GstCameraSrcColorBalanceChannel { + GstColorBalanceChannel parent; + + guint32 id; +} GstCameraSrcColorBalanceChannel; + +/** + * GstCameraSrcColorBalanceChannelClass: + * @parent: Element parent class + * + * #GstCameraSrcColorBalanceChannelClass class object. 
+ */ +typedef struct _GstCameraSrcColorBalanceChannelClass { + GstColorBalanceChannelClass parent; +} GstCameraSrcColorBalanceChannelClass; + +GType gst_camerasrc_color_balance_channel_get_type (void); + +const GList * gst_camerasrc_color_balance_list_channels (GstCameraSrc * camerasrc); + +void gst_camerasrc_color_balance_set_value (GstCameraSrc * camerasrc, + GstColorBalanceChannel * channel, + gint value); + +gint gst_camerasrc_color_balance_get_value (GstCameraSrc * camerasrc, + GstColorBalanceChannel * channel); + +#define GST_IMPLEMENT_CAMERA_SRC_COLOR_BALANCE_METHODS(Type, interface_as_function) \ + \ +static const GList * \ +interface_as_function ## _color_balance_list_channels (GstColorBalance * balance) \ +{ \ + Type *this = (Type*) balance; \ + return gst_camerasrc_color_balance_list_channels(this); \ +} \ + \ +static void \ +interface_as_function ## _color_balance_set_value (GstColorBalance * balance, \ + GstColorBalanceChannel * channel, \ + gint value) \ +{ \ + Type *this = (Type*) balance; \ + gst_camerasrc_color_balance_set_value (this, channel, value); \ +} \ + \ +static gint \ +interface_as_function ## _color_balance_get_value (GstColorBalance * balance, \ + GstColorBalanceChannel * channel) \ +{ \ + Type *this = (Type*) balance; \ + return gst_camerasrc_color_balance_get_value(this, channel); \ +} \ + \ +void \ +interface_as_function ## _color_balance_interface_init (GstColorBalanceClass * klass) \ +{ \ + GST_COLOR_BALANCE_TYPE (klass) = GST_COLOR_BALANCE_HARDWARE; \ + \ + /* default virtual functions */ \ + klass->list_channels = interface_as_function ## _color_balance_list_channels; \ + klass->set_value = interface_as_function ## _color_balance_set_value; \ + klass->get_value = interface_as_function ## _color_balance_get_value; \ +} \ + +#endif /* __GST_CAMERA_SRC_COLOR_BALANCE_H__ */ diff --git a/gst-libs/gst/camera/gstmfldcameracontroliface.c b/gst-libs/gst/camera/gstmfldcameracontroliface.c new file mode 100644 index 0000000..0a7abf9 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameracontroliface.c @@ -0,0 +1,701 @@ +/* GStreamer + * + * @author: Marko Ollonen + * + * gstmfldcameracontroliface.c: Camera control interface implementation + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include +#include "gstmfldcameracontroliface.h" +#include "gstmfldcamerasrc.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_camerasrc_debug); +#define GST_CAT_DEFAULT gst_camerasrc_debug + + +GST_BOILERPLATE (GstCameraSrcCameraControlChannel, + gst_camerasrc_camera_control_channel, + GstCameraControlChannel, GST_TYPE_CAMERA_CONTROL_CHANNEL); + + +static void +gst_camerasrc_camera_control_channel_base_init (gpointer g_class) +{ + +} + +static void +gst_camerasrc_camera_control_channel_class_init + (GstCameraSrcCameraControlChannelClass * klass) +{ + +} + +static void +gst_camerasrc_camera_control_channel_init (GstCameraSrcCameraControlChannel * + channel, GstCameraSrcCameraControlChannelClass * klass) +{ + channel->id = (guint32) - 1; + +} + +static G_GNUC_UNUSED gboolean +gst_camerasrc_camera_control_contains_channel (GstCameraSrc * camerasrc, + GstCameraSrcCameraControlChannelClass * channel) +{ + const GList *item; + + + for (item = camerasrc->camera_controls; item != NULL; item = item->next) + if (item->data == channel) + return TRUE; + + return FALSE; +} +const GList * +gst_camerasrc_camera_control_list_channels (GstCameraSrc * camerasrc) +{ + return camerasrc->camera_controls; +} + +gboolean +gst_camerasrc_camera_control_set_value( GstCameraSrc * camerasrc, + GstCameraControlChannel *control_channel ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + + /* assert that we're opened and that we're using a known item */ + g_return_val_if_fail (opened, FALSE); + + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_value( GstCameraSrc * camerasrc, + GstCameraControlChannel *control_channel ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + + return FALSE; +} +gboolean +gst_camerasrc_camera_control_set_exposure( GstCameraSrc * camerasrc, + gint type, gint value1, gint value2 ) +{ + gboolean ret = FALSE; + GstCameraSrcClass *bclass; + gboolean opened; + gboolean write = FALSE; + gboolean scene_override = FALSE; + guint32 val; + + GST_DEBUG_OBJECT (camerasrc, "%s",__func__); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + GST_OBJECT_LOCK (camerasrc); + switch( type ) { + case GST_CAMERA_CONTROL_F_NUMBER: + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_F_NUMBER value1:%d , value2:%d - set not supported", value1, value2); + break; + case GST_CAMERA_CONTROL_SHUTTER_SPEED: + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_SHUTTER_SPEED value1:%d , value2:%d - manual exposure not supported", value1, value2); + break; + case GST_CAMERA_CONTROL_ISO: + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_ISO value:%d", value1); + camerasrc->photoconf.iso_speed = value1; + write = TRUE; + break; + case GST_CAMERA_CONTROL_PROGRAM_MODE: + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_PROGRAM_MODE value:%d", value1); + camerasrc->photoconf.scene_mode = value1; + write = scene_override = TRUE; + break; + case GST_CAMERA_CONTROL_EXPOSURE_MODE: + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_EXPOSURE_MODE value:%d", value1); + ret = bclass->set_ae_mode(camerasrc, value1); + break; + case GST_CAMERA_CONTROL_EXPOSURE_VALUE: + val = ((value1 << 16) | value2); + GST_DEBUG_OBJECT (camerasrc, "GST_CAMERA_CONTROL_EXPOSURE_VALUE value1:%d , value2:%d -> val:%d", value1, 
value2, val); + camerasrc->photoconf.exposure = val; + write = TRUE; + break; + default: + GST_DEBUG_OBJECT (camerasrc, "%s",__func__); + } + GST_OBJECT_UNLOCK (camerasrc); + + if (write && bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, scene_override); + } + GST_DEBUG_OBJECT (camerasrc, "%s ret:%d",__func__, ret); + + return ret; +} +gboolean +gst_camerasrc_camera_control_get_exposure( GstCameraSrc* camerasrc, + gint type, gint* value1, gint* value2 ) +{ + gboolean ret = FALSE; + GstCameraSrcClass *bclass; + gboolean opened; + + GST_DEBUG_OBJECT (camerasrc, "%s",__func__); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + GST_OBJECT_LOCK (camerasrc); + switch( type ) { + case GST_CAMERA_CONTROL_F_NUMBER: + *value1 = *value2 = 0; + break; + case GST_CAMERA_CONTROL_SHUTTER_SPEED: + *value1 = *value2 = 0; + break; + case GST_CAMERA_CONTROL_ISO: + *value1 = camerasrc->photoconf.iso_speed; + ret = TRUE; + break; + case GST_CAMERA_CONTROL_PROGRAM_MODE: + *value1 = camerasrc->photoconf.scene_mode; + ret = TRUE; + break; + case GST_CAMERA_CONTROL_EXPOSURE_MODE: + ret = bclass->get_ae_mode(camerasrc, value1); + break; + case GST_CAMERA_CONTROL_EXPOSURE_VALUE: + /* exposure was stored packed as ((value1 << 16) | value2) in set_exposure above */ + *value1 = (camerasrc->photoconf.exposure >> 16) & 0xffff; + *value2 = camerasrc->photoconf.exposure & 0xffff; + ret = TRUE; + break; + default: + GST_DEBUG_OBJECT (camerasrc, "%s",__func__); + } + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "%d -> value1:%d , value2:%d", type, *value1, *value2); + return ret; +} +gboolean +gst_camerasrc_camera_control_set_capture_mode( GstCameraSrc* camerasrc, + gint type, gint value ) +{ + return FALSE; +} +gboolean +gst_camerasrc_camera_control_get_capture_mode( GstCameraSrc* camerasrc, + gint type, gint* value ) +{ + return FALSE; +} +gboolean +gst_camerasrc_camera_control_set_strobe( GstCameraSrc* camerasrc, + gint type, gint value ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + gboolean ret = FALSE; + + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + + if (type == GST_CAMERA_CONTROL_STROBE_MODE ) { + /* using GstFlashMode enumeration + * typedef enum + * { + * GST_PHOTOGRAPHY_FLASH_MODE_AUTO = 0, + * GST_PHOTOGRAPHY_FLASH_MODE_OFF, + * GST_PHOTOGRAPHY_FLASH_MODE_ON, + * GST_PHOTOGRAPHY_FLASH_MODE_FILL_IN, + * GST_PHOTOGRAPHY_FLASH_MODE_RED_EYE + * } GstFlashMode; + */ + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.flash_mode = value; + GST_OBJECT_UNLOCK (camerasrc); + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + } + else if (type == GST_CAMERA_CONTROL_STROBE_CONTROL) { + GST_OBJECT_LOCK (camerasrc); + if( value < 2) + camerasrc->enable_torch = value; + else + camerasrc->enable_torch = FALSE; + + if (bclass->set_strobe_state) + ret = bclass->set_strobe_state(camerasrc, camerasrc->enable_torch); + GST_OBJECT_UNLOCK (camerasrc); + } + + return ret; +} +gboolean +gst_camerasrc_camera_control_get_strobe( GstCameraSrc* camerasrc, + gint type, gint *value ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + + if (type == GST_CAMERA_CONTROL_STROBE_MODE ) { + GST_OBJECT_LOCK (camerasrc); + *value =
camerasrc->photoconf.flash_mode; + GST_OBJECT_UNLOCK (camerasrc); + } + else if (type == GST_CAMERA_CONTROL_STROBE_CONTROL) { + GST_OBJECT_LOCK (camerasrc); + *value = camerasrc->enable_torch; + GST_OBJECT_UNLOCK (camerasrc); + } + + return TRUE; +} + +gboolean +gst_camerasrc_camera_control_set_detect( GstCameraSrc* camerasrc, + gint type, gint value ) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_detect( GstCameraSrc* camerasrc, + gint type, gint* value ) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_set_zoom( GstCameraSrc* camerasrc, + gint type, gint value ) +{ + GstCameraSrcClass *bclass; + gboolean ret = FALSE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if ((type == GST_CAMERA_CONTROL_DIGITAL_ZOOM) && (value >= 100)) { + GST_OBJECT_LOCK (camerasrc); + /* use 2 decimals to get accurate (smooth) enough zoom */ + /* photography interface zoom is used; value carries the zoom factor scaled by 100 */ + camerasrc->photoconf.zoom = (gfloat) value / 100.0; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "setting zoom to %f", camerasrc->photoconf.zoom); + + if (bclass->set_zoom) { + ret = bclass->set_zoom (camerasrc, camerasrc->photoconf.zoom); + } + GST_DEBUG_OBJECT (camerasrc, "setting zoom %s", ret ? "success" : "failed"); + } + else + GST_DEBUG_OBJECT (camerasrc, "Invalid Zoom control, type: %d value: %d",type,value); + + return ret; +} + +gboolean +gst_camerasrc_camera_control_get_zoom( GstCameraSrc* camerasrc, + gint type, gint* value ) +{ + gboolean ret = FALSE; + + if (type == GST_CAMERA_CONTROL_DIGITAL_ZOOM) { + GST_OBJECT_LOCK (camerasrc); + *value = (gint)(camerasrc->photoconf.zoom * 100); + GST_OBJECT_UNLOCK (camerasrc); + GST_DEBUG_OBJECT (camerasrc, "current zoom = %d", *value); + ret = TRUE; + } + return ret; +} + +gboolean +gst_camerasrc_camera_control_set_focus( GstCameraSrc* camerasrc, + gint mode, gint range ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + GstFocusMode focus_mode; + gboolean ret = TRUE; + + GST_DEBUG_OBJECT (camerasrc, "gst_camerasrc_camera_control_set_focus: %d", mode); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + // Map focus to photography definitions + switch (mode) { + case MM_CAMCORDER_FOCUS_MODE_NONE: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY; + break; + case MM_CAMCORDER_FOCUS_MODE_AUTO: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_AUTO; + break; + case MM_CAMCORDER_FOCUS_MODE_TOUCH_AUTO: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_PORTRAIT; + break; + case MM_CAMCORDER_FOCUS_MODE_PAN: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL; + break; + case MM_CAMCORDER_FOCUS_MODE_MANUAL: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY; + break; + case MM_CAMCORDER_FOCUS_MODE_CONTINUOUS: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL; + break; + default: + focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY; + break; + } + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.focus_mode = focus_mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + + return ret; +} + +gboolean +gst_camerasrc_camera_control_get_focus( GstCameraSrc* camerasrc, + gint* mode, gint* range ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + GST_DEBUG_OBJECT (camerasrc, "gst_camerasrc_camera_control_get_focus"); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE);
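+ /* Note (descriptive comment): the reverse mapping below is lossy. MM_CAMCORDER_FOCUS_MODE_MANUAL and MM_CAMCORDER_FOCUS_MODE_TOUCH_AUTO (stored above as INFINITY and PORTRAIT) have no case here and are reported back as MM_CAMCORDER_FOCUS_MODE_NONE. */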
+ // Map focus from photography definitions + GST_OBJECT_LOCK (camerasrc); + switch (camerasrc->photoconf.focus_mode) { + case GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY: + *mode = MM_CAMCORDER_FOCUS_MODE_NONE; + break; + case GST_PHOTOGRAPHY_FOCUS_MODE_AUTO: + *mode = MM_CAMCORDER_FOCUS_MODE_AUTO; + break; + case GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL: + *mode = MM_CAMCORDER_FOCUS_MODE_PAN; + break; + case GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL: + *mode = MM_CAMCORDER_FOCUS_MODE_CONTINUOUS; + break; + default: + *mode = MM_CAMCORDER_FOCUS_MODE_NONE; + break; + } + GST_OBJECT_UNLOCK (camerasrc); + *range = 0; + + return TRUE; +} + +gboolean +gst_camerasrc_camera_control_start_auto_focus( GstCameraSrc* camerasrc) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + GST_DEBUG_OBJECT (camerasrc, "setting autofocus ON"); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + g_mutex_lock (camerasrc->af_lock); + camerasrc->requested_af_mode = AF_ON_REQUESTED; + g_mutex_unlock (camerasrc->af_lock); + + return TRUE; +} + +gboolean +gst_camerasrc_camera_control_stop_auto_focus( GstCameraSrc* camerasrc) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + GST_DEBUG_OBJECT (camerasrc, "setting autofocus OFF"); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + g_mutex_lock (camerasrc->af_lock); + camerasrc->requested_af_mode = AF_OFF_REQUESTED; + g_mutex_unlock (camerasrc->af_lock); + + return TRUE; +} +gboolean +gst_camerasrc_camera_control_set_focus_level( GstCameraSrc* camerasrc, + gint manual_level ) +{ + // Manual focus not supported atm + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_focus_level( GstCameraSrc* camerasrc, + gint* manual_level ) +{ + // Manual focus not supported atm + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_set_auto_focus_area( GstCameraSrc* camerasrc, + GstCameraControlRectType rect ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + int ret = FALSE; + + GST_DEBUG_OBJECT (camerasrc, "setting autofocus area"); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + g_mutex_lock (camerasrc->af_lock); + camerasrc->requested_af_mode = AF_OFF_REQUESTED; + // camcorder just gives a pixel not area. + // from camcorder x = 0...width y = 0 ...height + + GST_DEBUG_OBJECT (camerasrc, "Current width height (%dx%d)", + camerasrc->current_w, camerasrc->current_h); + + camerasrc->cam_ctrl.aaa_window.x_left = (rect.x - 20) < 0 ? rect.x : (rect.x - 20); + camerasrc->cam_ctrl.aaa_window.x_right = (rect.width + rect. x + 20) > camerasrc->current_w ? rect. x : (rect.width + rect. x + 20); + camerasrc->cam_ctrl.aaa_window.y_top = (rect.y - 20) < 0 ? rect.y : (rect.y - 20); + camerasrc->cam_ctrl.aaa_window.y_bottom = (rect.height + rect.y + 20) > camerasrc->current_h ? 
rect.y : (rect.height + rect.y + 20); + camerasrc->cam_ctrl.aaa_window.weight = 400; + + + if (bclass->set_AeAafwindow) { + ret = bclass->set_AeAafwindow(camerasrc, camerasrc->cam_ctrl.aaa_window); + } + g_mutex_unlock (camerasrc->af_lock); + + return ret; +} + +gboolean +gst_camerasrc_camera_control_get_auto_focus_area( GstCameraSrc* camerasrc, + GstCameraControlRectType* rect ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + + GST_DEBUG_OBJECT (camerasrc, "getting autofocus area"); + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + g_mutex_lock (camerasrc->af_lock); + camerasrc->requested_af_mode = AF_OFF_REQUESTED; + rect->x = camerasrc->cam_ctrl.aaa_window.x_left; + rect->width = camerasrc->cam_ctrl.aaa_window.x_right - camerasrc->cam_ctrl.aaa_window.x_left; + rect->y = camerasrc->cam_ctrl.aaa_window.y_top; + rect->height = camerasrc->cam_ctrl.aaa_window.y_bottom - camerasrc->cam_ctrl.aaa_window.y_top; + g_mutex_unlock (camerasrc->af_lock); + + return TRUE; +} + +gboolean +gst_camerasrc_camera_control_set_wdr( GstCameraSrc* camerasrc, gint value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_wdr( GstCameraSrc* camerasrc, gint* value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_set_ahs( GstCameraSrc* camerasrc, gint value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_ahs( GstCameraSrc* camerasrc, gint* value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_set_part_color( GstCameraSrc* camerasrc, + gint type, gint value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_part_color( GstCameraSrc* camerasrc, + gint type, gint* value) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_exif_info( GstCameraSrc* camerasrc, + GstCameraControlExifInfo* info) +{ + + GstCameraSrcClass *bclass; + gboolean ret = FALSE; + gboolean opened; + GstCameraControlExifInfo read_info; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_val_if_fail (opened, FALSE); + + ret = bclass->read_exif (camerasrc, &read_info); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->cam_ctrl.exif_info = read_info; + *info = camerasrc->cam_ctrl.exif_info; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got exif info, ret = %d", ret); + + return ret; +} + +gboolean +gst_camerasrc_camera_control_get_basic_dev_info( GstCameraSrc* camerasrc, + gint dev_id, GstCameraControlCapsInfoType* info) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_misc_dev_info( GstCameraSrc* camerasrc, + gint dev_id, GstCameraControlCtrlListInfoType* info) +{ + return FALSE; +} + +gboolean +gst_camerasrc_camera_control_get_extra_dev_info( GstCameraSrc* camerasrc, + gint dev_id, GstCameraControlExtraInfoType* info) +{ + return FALSE; +} + +void +gst_camerasrc_camera_control_set_capture_command( GstCameraSrc* camerasrc, + GstCameraControlCaptureCommand cmd ) +{ + GstCameraSrcClass *bclass; + gboolean opened; + GstCaps *CaptureCaps = NULL; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + opened = bclass->is_open (camerasrc); + g_return_if_fail (opened); + + GST_DEBUG_OBJECT (camerasrc, "set_capture_command: %d", cmd); + + /* FIXME */ + /* no capture command for video (in the interface) */ + if (cmd == GST_CAMERA_CONTROL_CAPTURE_COMMAND_START) { + g_mutex_lock (camerasrc->state_lock); + /* Tell subclass to interrupt frame grabbing */ + if (bclass->unlock) { + 
bclass->unlock (camerasrc); + } + camerasrc->capture_mode = GST_CAMERA_SRC_CAPTURE_MODE_STILL; + bclass->set_capture_mode (camerasrc, camerasrc->capture_mode); + camerasrc->capture_fps_n = camerasrc->fps_n; + camerasrc->capture_fps_d = camerasrc->fps_d; + + camerasrc->preview_fourcc = camerasrc->current_fourcc; + camerasrc->preview_w = camerasrc->current_w; + camerasrc->preview_h = camerasrc->current_h; + + GST_DEBUG_OBJECT (camerasrc, "Current reso (%dx%d @ %f)", + camerasrc->current_w, camerasrc->current_h, + (gfloat) camerasrc->capture_fps_n / camerasrc->capture_fps_d); + + GST_DEBUG_OBJECT (camerasrc, "starting preparations for capture (%dx%d @ %f)", + camerasrc->capture_w, camerasrc->capture_h, + (gfloat) camerasrc->capture_fps_n / camerasrc->capture_fps_d); + GST_DEBUG ("capture fourcc 0x%08x %" GST_FOURCC_FORMAT, + camerasrc->capture_fourcc, GST_FOURCC_ARGS (camerasrc->capture_fourcc)); + + CaptureCaps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, camerasrc->capture_fourcc, + "width", G_TYPE_INT, camerasrc->capture_w, + "height", G_TYPE_INT, camerasrc->capture_h, + "framerate", GST_TYPE_FRACTION, camerasrc->capture_fps_n, + camerasrc->capture_fps_d, NULL); + + camerasrc->capture_counter = camerasrc->capture_count; + + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE_START; + g_mutex_unlock (camerasrc->state_lock); + } + else { + g_mutex_lock (camerasrc->state_lock); + camerasrc->capture_mode = GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER; + camerasrc->capture_counter = 1; + bclass->set_capture_mode (camerasrc, camerasrc->capture_mode); + g_mutex_unlock (camerasrc->state_lock); + } +} diff --git a/gst-libs/gst/camera/gstmfldcameracontroliface.h b/gst-libs/gst/camera/gstmfldcameracontroliface.h new file mode 100644 index 0000000..5814830 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameracontroliface.h @@ -0,0 +1,437 @@ +/* GStreamer + * + * @author: Marko Ollonen + * + * gstmfldcameracontroliface.h: Camera control interface implementation + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ * + */ + +#ifndef __GST_CAMERA_SRC_CAMERA_CONTROL_H__ +#define __GST_CAMERA_SRC_CAMERA_CONTROL_H__ + +#include +#include +#include + +#include "gstmfldcamerasrc.h" + +G_BEGIN_DECLS + +#define GST_TYPE_CAMERA_SRC_CAMERA_CONTROL_CHANNEL \ + (gst_camerasrc_camera_control_channel_get_type ()) +#define GST_CAMERA_SRC_CAMERA_CONTROL_CHANNEL(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_CAMERA_SRC_CAMERA_CONTROL_CHANNEL, \ + GstCameraSrcCameraControlChannel)) +#define GST_CAMERA_SRC_CAMERA_CONTROL_CHANNEL_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_CAMERA_SRC_CAMERA_CONTROL_CHANNEL, \ + GstCameraSrcCameraControlChannelClass)) +#define GST_IS_CAMERA_SRC_CAMERA_CONTROL_CHANNEL(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_CAMERA_SRC_CAMERA_CONTROL_CHANNEL)) +#define GST_IS_CAMERA_SRC_CAMERA_CONTROL_CHANNEL_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_CAMERA_SRC_CAMERA_CONTROL_CHANNEL)) + +/* Nicely defined Interface. These are taken from user of interface no definitions in interface itself */ +typedef enum{ + MM_CAMCORDER_FOCUS_MODE_NONE = 0, /**< Focus mode is None */ + MM_CAMCORDER_FOCUS_MODE_PAN, /**< Pan focus mode*/ + MM_CAMCORDER_FOCUS_MODE_AUTO, /**< Autofocus mode*/ + MM_CAMCORDER_FOCUS_MODE_MANUAL, /**< Manual focus mode*/ + MM_CAMCORDER_FOCUS_MODE_TOUCH_AUTO, /**< Touch Autofocus mode*/ + MM_CAMCORDER_FOCUS_MODE_CONTINUOUS, /**< Continuous Autofocus mode*/ +} MMCamcorderFocusMode; + + +typedef struct _GstCameraSrcCameraControlChannel { + GstCameraControlChannel parent; + + guint32 id; +} GstCameraSrcCameraControlChannel; + +/** + * GstCameraSrcCameraControlChannelClass: + * @parent: Element parent class + * + * #GstCameraSrcCameraControlChannelClass class object. + */ +typedef struct _GstCameraSrcCameraControlChannelClass { + GstCameraControlChannelClass parent; +} GstCameraSrcCameraControlChannelClass; + +GType gst_camerasrc_camera_control_channel_get_type (void); + +const GList * +gst_camerasrc_camera_control_list_channels (GstCameraSrc * camerasrc); + +gboolean +gst_camerasrc_camera_control_set_value(GstCameraSrc * camerasrc, + GstCameraControlChannel * control_channel ); + +gboolean +gst_camerasrc_camera_control_get_value(GstCameraSrc * camerasrc, + GstCameraControlChannel* control_channel ); + +gboolean +gst_camerasrc_camera_control_set_exposure(GstCameraSrc * camerasrc, + gint type, gint value1, gint value2 ); + +gboolean +gst_camerasrc_camera_control_get_exposure(GstCameraSrc* control, + gint type, gint* value1, gint* value2 ); + +gboolean +gst_camerasrc_camera_control_set_capture_mode(GstCameraSrc* control, + gint type, gint value ); +gboolean +gst_camerasrc_camera_control_get_capture_mode(GstCameraSrc* control, + gint type, gint* value ); + +gboolean +gst_camerasrc_camera_control_set_strobe(GstCameraSrc* camerasrc, + gint type, gint value ); + +gboolean +gst_camerasrc_camera_control_get_strobe(GstCameraSrc* camerasrc, + gint type, gint *value ); + +gboolean +gst_camerasrc_camera_control_set_detect(GstCameraSrc* camerasrc, + gint type, gint value ); + +gboolean +gst_camerasrc_camera_control_get_detect(GstCameraSrc* camerasrc, + gint type, gint* value ); + +gboolean +gst_camerasrc_camera_control_set_zoom(GstCameraSrc* camerasrc, + gint type, gint value ); +gboolean +gst_camerasrc_camera_control_get_zoom(GstCameraSrc* camerasrc, + gint type, gint* value ); +gboolean +gst_camerasrc_camera_control_set_focus(GstCameraSrc* camerasrc, + gint mode, gint range ); + +gboolean +gst_camerasrc_camera_control_get_focus(GstCameraSrc* camerasrc, 
+ gint* mode, gint* range ); + +gboolean +gst_camerasrc_camera_control_start_auto_focus(GstCameraSrc* camerasrc); + +gboolean +gst_camerasrc_camera_control_stop_auto_focus(GstCameraSrc* camerasrc); + +gboolean +gst_camerasrc_camera_control_set_focus_level(GstCameraSrc* camerasrc, + gint manual_level ); +gboolean +gst_camerasrc_camera_control_get_focus_level(GstCameraSrc* camerasrc, + gint* manual_level ); + +gboolean +gst_camerasrc_camera_control_set_auto_focus_area(GstCameraSrc* camerasrc, + GstCameraControlRectType rect ); + +gboolean +gst_camerasrc_camera_control_get_auto_focus_area(GstCameraSrc* camerasrc, + GstCameraControlRectType* rect ); + +gboolean +gst_camerasrc_camera_control_set_wdr(GstCameraSrc* camerasrc, gint value); + +gboolean +gst_camerasrc_camera_control_get_wdr(GstCameraSrc* camerasrc, gint* value); + +gboolean +gst_camerasrc_camera_control_set_ahs(GstCameraSrc* camerasrc, gint value); + +gboolean +gst_camerasrc_camera_control_get_ahs(GstCameraSrc* camerasrc, gint* value); + +gboolean +gst_camerasrc_camera_control_set_part_color(GstCameraSrc* camerasrc, + gint type, gint value); + +gboolean +gst_camerasrc_camera_control_get_part_color(GstCameraSrc* camerasrc, + gint type, gint* value); + +gboolean +gst_camerasrc_camera_control_get_exif_info(GstCameraSrc* camerasrc, + GstCameraControlExifInfo* info); + +gboolean +gst_camerasrc_camera_control_get_basic_dev_info(GstCameraSrc* camerasrc, + gint dev_id, + GstCameraControlCapsInfoType* info); + +gboolean +gst_camerasrc_camera_control_get_misc_dev_info(GstCameraSrc* camerasrc, + gint dev_id, + GstCameraControlCtrlListInfoType* info); + +gboolean +gst_camerasrc_camera_control_get_extra_dev_info(GstCameraSrc* camerasrc, + gint dev_id, + GstCameraControlExtraInfoType* info); + +void +gst_camerasrc_camera_control_set_capture_command(GstCameraSrc* camerasrc, + GstCameraControlCaptureCommand cmd ); + + +#define GST_IMPLEMENT_CAMERA_SRC_CAMERA_CONTROL_METHODS(Type, interface_as_function) \ + \ +static const GList * \ +interface_as_function ## _camera_control_list_channels (GstCameraControl * control) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_list_channels(this); \ +} \ + \ +static gint \ +interface_as_function ## _camera_control_get_value (GstCameraControl *control, \ + GstCameraControlChannel *control_channel ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_value(this, control_channel); \ +} \ + \ +static gboolean \ +interface_as_function ## _camera_control_set_value( GstCameraControl *control, \ + GstCameraControlChannel *control_channel ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_value(this, control_channel); \ +} \ + \ +static gboolean \ +interface_as_function ## _camera_control_set_exposure( GstCameraControl* control, \ + gint type, gint value1, gint value2 ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_exposure(this, type, value1, value2); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_exposure( GstCameraControl* control, \ + gint type, gint* value1, gint* value2 ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_exposure(this, type, value1,value2); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_capture_mode( GstCameraControl* control, \ + gint type, gint value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_capture_mode(this, type, value); \ +} \ +static 
gboolean \ +interface_as_function ##_camera_control_get_capture_mode( GstCameraControl* control, \ + gint type, gint* value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_capture_mode(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_strobe( GstCameraControl* control \ + , gint type, gint value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_strobe(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_strobe( GstCameraControl* control, \ + gint type, gint *value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_strobe(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_detect( GstCameraControl* control, \ + gint type, gint value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_detect(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_detect( GstCameraControl* control, \ + gint type, gint* value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_detect(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_zoom( GstCameraControl* control, \ + gint type, gint value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_zoom(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_zoom( GstCameraControl* control, \ + gint type, gint* value ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_zoom(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_focus( GstCameraControl* control, \ + gint mode, gint range ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_focus(this, mode, range); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_focus( GstCameraControl* control, \ + gint* mode, gint* range ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_focus(this, mode, range); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_start_auto_focus( GstCameraControl* control) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_start_auto_focus(this); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_stop_auto_focus( GstCameraControl* control) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_stop_auto_focus(this); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_focus_level( GstCameraControl* control, \ + gint manual_level ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_focus_level(this, manual_level); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_focus_level( GstCameraControl* control, \ + gint* manual_level ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_focus_level(this, manual_level); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_auto_focus_area( GstCameraControl* control, \ + GstCameraControlRectType rect ) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_auto_focus_area(this, rect); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_auto_focus_area( GstCameraControl* control, \ + GstCameraControlRectType* rect ) \ +{ \ + Type *this 
= (Type*) control; \ + return gst_camerasrc_camera_control_get_auto_focus_area(this, rect); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_wdr( GstCameraControl* control, gint value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_wdr(this, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_wdr( GstCameraControl* control, gint* value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_wdr(this, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_ahs( GstCameraControl* control, gint value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_ahs(this,value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_ahs( GstCameraControl* control, gint* value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_ahs(this,value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_set_part_color( GstCameraControl* control, \ + gint type, gint value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_set_part_color(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_part_color( GstCameraControl* control, \ + gint type, gint* value) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_part_color(this, type, value); \ +} \ +static gboolean \ +interface_as_function ##_camera_control_get_exif_info( GstCameraControl* control, \ + GstCameraControlExifInfo* info) \ +{ \ + Type *this = (Type*) control; \ + return gst_camerasrc_camera_control_get_exif_info(this, info); \ +} \ +static void \ +interface_as_function ##_camera_control_set_capture_command( GstCameraControl* control, \ + GstCameraControlCaptureCommand cmd ) \ +{ \ + Type *this = (Type*) control; \ + gst_camerasrc_camera_control_set_capture_command(this, cmd); \ +} \ + \ +void \ +interface_as_function ## _camera_control_interface_init (GstCameraControlClass * klass) \ +{ \ + \ + GST_CAMERA_CONTROL_TYPE (klass) = GST_CAMERA_CONTROL_HARDWARE; \ + \ + klass->list_channels = interface_as_function ## _camera_control_list_channels; \ + klass->set_exposure = interface_as_function ##_camera_control_set_exposure; \ + klass->get_exposure = interface_as_function ##_camera_control_get_exposure; \ + klass->set_capture_mode = interface_as_function ##_camera_control_set_capture_mode; \ + klass->get_capture_mode = interface_as_function ##_camera_control_get_capture_mode; \ + klass->set_strobe = interface_as_function ##_camera_control_set_strobe; \ + klass->get_strobe = interface_as_function ##_camera_control_get_strobe; \ + klass->set_detect = interface_as_function ##_camera_control_set_detect; \ + klass->get_detect = interface_as_function ##_camera_control_get_detect; \ + klass->set_value = interface_as_function ## _camera_control_set_value; \ + klass->get_value = interface_as_function ## _camera_control_get_value; \ + klass->set_zoom = interface_as_function ##_camera_control_set_zoom; \ + klass->get_zoom = interface_as_function ##_camera_control_get_zoom; \ + klass->set_focus = interface_as_function ##_camera_control_set_focus; \ + klass->get_focus = interface_as_function ##_camera_control_get_focus; \ + klass->start_auto_focus = interface_as_function ##_camera_control_start_auto_focus; \ + klass->stop_auto_focus = interface_as_function ##_camera_control_stop_auto_focus; \ + klass->set_focus_level = interface_as_function 
##_camera_control_set_focus_level; \ + klass->get_focus_level = interface_as_function ##_camera_control_get_focus_level; \ + klass->set_auto_focus_area = interface_as_function ##_camera_control_set_auto_focus_area; \ + klass->get_auto_focus_area = interface_as_function ##_camera_control_get_auto_focus_area; \ + klass->set_wdr = interface_as_function ##_camera_control_set_wdr; \ + klass->get_wdr = interface_as_function ##_camera_control_get_wdr; \ + klass->set_ahs = interface_as_function ##_camera_control_set_ahs; \ + klass->get_ahs = interface_as_function ##_camera_control_get_ahs; \ + klass->set_part_color = interface_as_function ##_camera_control_set_part_color; \ + klass->get_part_color = interface_as_function ##_camera_control_get_part_color; \ + klass->get_exif_info = interface_as_function ##_camera_control_get_exif_info; \ + klass->set_capture_command = interface_as_function ##_camera_control_set_capture_command; \ +} \ + +#endif /* __GST_CAMERA_SRC_CAMERA_CONTROL_H__ */ diff --git a/gst-libs/gst/camera/gstmfldcameraphotoiface.c b/gst-libs/gst/camera/gstmfldcameraphotoiface.c new file mode 100644 index 0000000..4bfa881 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameraphotoiface.c @@ -0,0 +1,1460 @@ +/* GStreamer + * + * Copyright (C) 2008-2010 Nokia Corporation + * + * gstcameraphotoiface.c: Photo interface implementation for camerasrc + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "gstmfldcameraphotoiface.h" +#include "gstmfldcamerasrc.h" + +GST_DEBUG_CATEGORY_EXTERN (gst_camerasrc_debug); +#define GST_CAT_DEFAULT gst_camerasrc_debug + + +/* + * + */ +GstPhotoCaps +gst_camerasrc_photo_get_capabilities (GstCameraSrc *camerasrc) +{ + GstCameraSrcClass *bclass; + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + /* FIXME: driver might not be initialized yet */ + return bclass->get_capabilities (camerasrc); +} + +gboolean +gst_camerasrc_photo_parse_capture_caps (GstCameraSrc * camerasrc, + GstCaps * op_mode_caps); + + +/* + * + */ +gboolean +gst_camerasrc_photo_prepare_for_capture (GstCameraSrc *camerasrc, + GstPhotoCapturePrepared func, + GstCaps *desired_caps, + gpointer user_data) +{ + GstCameraSrcClass *bclass; + gboolean use_vf_caps = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + g_mutex_lock (camerasrc->state_lock); + + camerasrc->prep_func = func; + camerasrc->prep_udata = user_data; + + if (!camerasrc->capture_resolution_set) { + if (desired_caps) { + GstStructure *cstr; + + GST_DEBUG_OBJECT (camerasrc, "parsing capture caps"); + + cstr = gst_caps_get_structure (desired_caps, 0); + + /* FIXME: Don't require FPS in capture caps */ + if (gst_structure_get_int (cstr, "width", + (gint *) & camerasrc->capture_w) && + gst_structure_get_int (cstr, "height", + (gint *) & camerasrc->capture_h) && + gst_structure_get_fraction (cstr, "framerate", + (gint *) & camerasrc->capture_fps_n, + (gint *) & camerasrc->capture_fps_d)) { + if (gst_structure_has_field (cstr, "format")) { + gst_structure_get_fourcc (cstr, "format", &camerasrc->capture_fourcc); + } else { + /* If color format is not set, use the viewfinder format then */ + GST_DEBUG_OBJECT (camerasrc, "using viewfinder color format"); + camerasrc->capture_fourcc = camerasrc->current_fourcc; + } + use_vf_caps = FALSE; + } + } + + if (use_vf_caps) { + GST_DEBUG_OBJECT (camerasrc, "given caps inadequate, using VF caps"); + + camerasrc->capture_w = camerasrc->current_w; + camerasrc->capture_h = camerasrc->current_h; + camerasrc->capture_fps_n = camerasrc->fps_n; + camerasrc->capture_fps_d = camerasrc->fps_d; + camerasrc->capture_fourcc = camerasrc->current_fourcc; + } + } + + if (!camerasrc->preview_resolution_set) { + if (use_vf_caps) { + GST_DEBUG_OBJECT (camerasrc, "given caps inadequate, using VF caps"); + + camerasrc->preview_w = 640; + camerasrc->preview_h = 480; /* Default preview caps */ + camerasrc->preview_fourcc = camerasrc->current_fourcc; + } + } + + GST_DEBUG_OBJECT (camerasrc, "starting preparations for capture (%dx%d @ %f)", + camerasrc->capture_w, camerasrc->capture_h, + (gfloat) camerasrc->capture_fps_n / camerasrc->capture_fps_d); + + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE_START; + + GST_DEBUG_OBJECT (camerasrc, "calling unlock"); + /* Tell subclass to interrupt frame grabbing */ + if (bclass->unlock) { + bclass->unlock (camerasrc); + } + GST_DEBUG_OBJECT (camerasrc, "unlock returned"); + + g_mutex_unlock (camerasrc->state_lock); + + return TRUE; +} + + +/* + * + */ +void +gst_camerasrc_photo_ready_for_capture (GstCameraSrc *camerasrc, + GstCaps *newcaps) +{ + GST_DEBUG_OBJECT (camerasrc, "prepare for capture is complete"); + + if (camerasrc->prep_func) { + camerasrc->prep_func (camerasrc->prep_udata, newcaps); + camerasrc->prep_func = NULL; + camerasrc->prep_udata = NULL; + } + GST_DEBUG_OBJECT (camerasrc, "callback returned"); +} + + +/* + * + */ +static char * +create_debug_string 
(const char *base_str, GType type, gint value) +{ + GTypeClass *t = g_type_class_ref (type); + GEnumValue *val = g_enum_get_value (G_ENUM_CLASS (t), value); + gchar *ret; + + if (val == NULL) + return "unknow value"; + + ret = g_strconcat (base_str, val->value_nick, NULL); + g_type_class_unref (t); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_ev_compensation (GstCameraSrc *camerasrc, + gfloat ev_comp) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + /* FIXME: Remove this artificial limit, let subclass handle it */ + if (ev_comp >= -2.5 && ev_comp <= 2.5) { + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.ev_compensation = ev_comp; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + GST_DEBUG_OBJECT (camerasrc, "set EV: %.2f, ret = %d", ev_comp, ret); + } else { + GST_DEBUG_OBJECT (camerasrc, "requested ev compensation value out of range"); + ret = FALSE; + } + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_ev_compensation (GstCameraSrc *camerasrc, + gfloat *ev_comp) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *ev_comp = camerasrc->photoconf.ev_compensation; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got ev compensation: %.2f, ret = %d", + *ev_comp, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_iso_speed (GstCameraSrc *camerasrc, + guint iso_speed) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.iso_speed = iso_speed; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + GST_DEBUG_OBJECT (camerasrc, "set ISO: %d, ret = %d", iso_speed, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_iso_speed (GstCameraSrc *camerasrc, + guint *iso_speed) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *iso_speed = camerasrc->photoconf.iso_speed; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got iso speed: %d, ret = %d", *iso_speed, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_aperture (GstCameraSrc *camerasrc, + guint aperture) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.aperture = aperture; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + GST_DEBUG_OBJECT (camerasrc, "set aperture: %d, ret = %d", aperture, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_aperture (GstCameraSrc *camerasrc, + guint *aperture) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, 
&camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *aperture = camerasrc->photoconf.aperture; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got aperture: %d, ret = %d", *aperture, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_exposure (GstCameraSrc *camerasrc, + guint32 exposure) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.exposure = exposure; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + GST_DEBUG_OBJECT (camerasrc, "set exposure: %d, ret = %d", exposure, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_exposure (GstCameraSrc *camerasrc, + guint32 *exposure) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *exposure = camerasrc->photoconf.exposure; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got exposure: %d, ret = %d", *exposure, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_wb_mode (GstCameraSrc *camerasrc, + GstWhiteBalanceMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.wb_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + dstr = create_debug_string ("AWB:", GST_TYPE_WHITE_BALANCE_MODE, mode); + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_wb_mode (GstCameraSrc *camerasrc, + GstWhiteBalanceMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.wb_mode; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got AWB mode:%d, ret = %d", *mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_tone_mode (GstCameraSrc *camerasrc, + GstColourToneMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.tone_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + dstr = create_debug_string ("tone:", GST_TYPE_COLOUR_TONE_MODE, mode); + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_tone_mode (GstCameraSrc *camerasrc, + GstColourToneMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.tone_mode; + GST_OBJECT_UNLOCK (camerasrc); + + 
GST_DEBUG_OBJECT (camerasrc, "got tone mode: %d, ret = %d", *mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_scene_mode (GstCameraSrc *camerasrc, + GstSceneMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + GParamSpec **properties; + gpointer photo_iface; + guint i, n_properties = 0; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.scene_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, TRUE); + + if (ret) { + /* Read the changed parameters to local settings cache */ + bclass->read_settings (camerasrc, &camerasrc->photoconf); + /* Notify that changing scene mode might have changed also other + interface properties */ + GST_DEBUG_OBJECT (camerasrc, "notifying interface property changes"); + photo_iface = g_type_default_interface_ref (GST_TYPE_PHOTOGRAPHY); + properties = + g_object_interface_list_properties (photo_iface, &n_properties); + if (properties) { + for (i = 0; i < n_properties; i++) { + const gchar *name = g_param_spec_get_name (properties[i]); + g_object_notify (G_OBJECT (camerasrc), name); + } + g_free (properties); + } + g_type_default_interface_unref (photo_iface); + } + } + dstr = create_debug_string ("scene:", GST_TYPE_SCENE_MODE, mode); + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_scene_mode (GstCameraSrc *camerasrc, + GstSceneMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.scene_mode; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got scene mode: %d, ret = %d", *mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_flash_mode (GstCameraSrc *camerasrc, + GstFlashMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.flash_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + dstr = create_debug_string ("flash:", GST_TYPE_FLASH_MODE, mode); + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_flash_mode (GstCameraSrc *camerasrc, + GstFlashMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.flash_mode; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got flash mode: %d, ret = %d", *mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_zoom (GstCameraSrc *camerasrc, gfloat zoom) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.zoom = zoom; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "setting zoom to %f", 
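+/* Note (illustrative, not part of the patch): because set_scene_mode() above
+ * re-reads the settings block and emits "notify" for every GstPhotography
+ * property after a successful scene change, an application can follow the
+ * side effects of a scene switch with ordinary GObject notify handlers, e.g.
+ *
+ *   g_signal_connect (src, "notify::white-balance-mode",
+ *       G_CALLBACK (on_wb_changed), NULL);
+ *
+ * The property name follows the photography interface; on_wb_changed is a
+ * hypothetical application callback.
+ */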
camerasrc->photoconf.zoom); + + if (bclass->set_zoom) { + ret = bclass->set_zoom (camerasrc, camerasrc->photoconf.zoom); + } + else { + /* FIXME: Check if the zoom is within known limits */ + } + GST_DEBUG_OBJECT (camerasrc, "setting zoom %s", ret ? "success" : "failed"); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_zoom (GstCameraSrc *camerasrc, gfloat *zoom) +{ + GST_OBJECT_LOCK (camerasrc); + *zoom = camerasrc->photoconf.zoom; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "current zoom = %f", *zoom); + return TRUE; +} + + +/* +* +*/ +gboolean +gst_camerasrc_photo_set_flicker_mode (GstCameraSrc *camerasrc, + GstFlickerReductionMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.flicker_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + dstr = create_debug_string ("flicker mode:", + GST_TYPE_FLICKER_REDUCTION_MODE, mode); + + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_flicker_mode (GstCameraSrc *camerasrc, + GstFlickerReductionMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.flicker_mode; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got flicker mode: %d, ret = %d", *mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_focus_mode (GstCameraSrc *camerasrc, + GstFocusMode mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + gchar *dstr; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_OBJECT_LOCK (camerasrc); + camerasrc->photoconf.focus_mode = mode; + GST_OBJECT_UNLOCK (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + dstr = create_debug_string ("focus mode:", GST_TYPE_FOCUS_MODE, mode); + GST_DEBUG_OBJECT (camerasrc, "set %s, ret = %d", dstr, ret); + g_free (dstr); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_focus_mode (GstCameraSrc *camerasrc, + GstFocusMode *mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + + GST_OBJECT_LOCK (camerasrc); + *mode = camerasrc->photoconf.focus_mode; + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got focus mode: %d, ret = %d", *mode, ret); + + return ret; +} + +/* + * + */ +gboolean +gst_camerasrc_photo_set_noise_reduction (GstCameraSrc * camerasrc, guint mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + camerasrc->photoconf.noise_reduction = mode; + + if (bclass->is_active (camerasrc)) { + ret = bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE); + } + GST_DEBUG_OBJECT (camerasrc, "set noise reduction: %d, ret = %d", mode, ret); + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_noise_reduction (GstCameraSrc * camerasrc, + GstPhotographyNoiseReduction * 
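+/* The setters/getters above all share one pattern: cache the value in
+ * camerasrc->photoconf under the object lock and, if the subclass reports the
+ * device as active, flush the whole block through its write_settings() /
+ * read_settings() vmethods.  A minimal subclass hook-up could look like the
+ * sketch below; the MyCamSrc type and my_camsrc_apply_to_driver() are
+ * hypothetical, only the vmethod names come from GstCameraSrcClass.
+ */
+#if 0   /* sketch only */
+static gboolean
+my_camsrc_is_active (GstCameraSrc *camsrc)
+{
+  MyCamSrc *self = (MyCamSrc *) camsrc;
+  return self->video_fd != -1;   /* device open and streaming */
+}
+
+static gboolean
+my_camsrc_write_settings (GstCameraSrc *camsrc, GstPhotoSettings *conf,
+    gboolean scene_override)
+{
+  /* push the cached settings to the driver; when scene_override is TRUE the
+   * scene mode is applied first and may adjust the other fields */
+  return my_camsrc_apply_to_driver ((MyCamSrc *) camsrc, conf, scene_override);
+}
+
+static void
+my_camsrc_class_init_hook (GstCameraSrcClass *bclass)
+{
+  bclass->is_active = my_camsrc_is_active;
+  bclass->write_settings = my_camsrc_write_settings;
+}
+#endif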
mode) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + ret = bclass->read_settings (camerasrc, &camerasrc->photoconf); + } + *mode = camerasrc->photoconf.noise_reduction; + GST_DEBUG_OBJECT (camerasrc, "got noise reduction mode: %d, ret = %d", mode, + ret); + + return ret; +} + +/* + * + */ +void +gst_camerasrc_photo_set_autofocus (GstCameraSrc *camerasrc, + gboolean on) +{ + GST_DEBUG_OBJECT (camerasrc, "setting autofocus %s", on ? "ON" : "OFF"); + + g_mutex_lock (camerasrc->af_lock); + + if (on) { + camerasrc->requested_af_mode = AF_ON_REQUESTED; + } + else { + camerasrc->requested_af_mode = AF_OFF_REQUESTED; + } + + g_mutex_unlock (camerasrc->af_lock); + + GST_DEBUG_OBJECT (camerasrc, "setting autofocus done"); +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_config (GstCameraSrc *camerasrc, + GstPhotoSettings * config) +{ + GstCameraSrcClass *bclass; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (bclass->is_active (camerasrc)) { + gboolean scene_override = + config->scene_mode != GST_PHOTOGRAPHY_SCENE_MODE_MANUAL ? TRUE : FALSE; + + ret = bclass->write_settings (camerasrc, config, scene_override); + + if (ret && scene_override) { + ret = bclass->read_settings (camerasrc, config); + } + } + GST_DEBUG_OBJECT (camerasrc, "set config, ret = %d", ret); + + if (ret) { + GST_OBJECT_LOCK (camerasrc); + memcpy (&camerasrc->photoconf, config, sizeof (GstPhotoSettings)); + GST_OBJECT_UNLOCK (camerasrc); + } + + return ret; + +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_config (GstCameraSrc *camerasrc, + GstPhotoSettings * config) +{ + GST_OBJECT_LOCK (camerasrc); + memcpy (config, &camerasrc->photoconf, sizeof (GstPhotoSettings)); + GST_OBJECT_UNLOCK (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "got config"); + return TRUE; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_format (GstCameraSrc *camerasrc, + GstOperationMode op_mode, + GstCaps * op_mode_caps) +{ + gboolean ret = TRUE; + + GST_DEBUG_OBJECT (camerasrc, "Caps received: %" GST_PTR_FORMAT, op_mode_caps); + + if (op_mode == GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE) { + ret = gst_camerasrc_photo_parse_capture_caps (camerasrc, op_mode_caps); + } else if (op_mode == GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW) { + ret = gst_camerasrc_photo_parse_preview_caps (camerasrc, op_mode_caps); + } else { + GST_WARNING ("Trying to set unsupported operation mode"); + ret = FALSE; + } + + return ret; +} + + +/* + * + */ +GstCaps * +gst_camerasrc_photo_get_format (GstCameraSrc * camerasrc, + GstOperationMode op_mode) +{ + GstCaps *ret = NULL; + + /* FIXME: Check if the format is set or not */ + + if (op_mode == GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE && + camerasrc->capture_resolution_set) { + ret = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, camerasrc->capture_fourcc, + "width", G_TYPE_INT, camerasrc->capture_w, + "height", G_TYPE_INT, camerasrc->capture_h, + "framerate", GST_TYPE_FRACTION, camerasrc->capture_fps_n, + camerasrc->capture_fps_d, NULL); + + GST_DEBUG_OBJECT (camerasrc, "get format (capture): %" GST_PTR_FORMAT, ret); + } else if (op_mode == GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW && + camerasrc->preview_resolution_set) { + ret = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, camerasrc->preview_fourcc, + "width", G_TYPE_INT, camerasrc->preview_w, + "height", G_TYPE_INT, camerasrc->preview_h, NULL); + + GST_DEBUG_OBJECT (camerasrc, "get 
format (preview): %" GST_PTR_FORMAT, ret); + } + + return ret; +} + + + +/* + * + */ +gboolean +gst_camerasrc_photo_get_property (GstCameraSrc *camerasrc, + guint prop_id, GValue * value) +{ + GstCameraSrcClass *bclass; + gboolean ret = FALSE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + switch (prop_id) { + case PROP_WB_MODE: + { + GstWhiteBalanceMode wb_mode; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_WB_MODE ===="); + if (gst_camerasrc_photo_get_wb_mode (camerasrc, &wb_mode)) { + g_value_set_enum (value, wb_mode); + } + ret = TRUE; + break; + } + case PROP_COLOUR_TONE: + { + GstColourToneMode tone; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_COLOUR_TONE ===="); + if (gst_camerasrc_photo_get_tone_mode (camerasrc, &tone)) { + g_value_set_enum (value, tone); + } + ret = TRUE; + break; + } + case PROP_SCENE_MODE: + { + GstSceneMode scene; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_SCENE_MODE ===="); + if (gst_camerasrc_photo_get_scene_mode (camerasrc, &scene)) { + g_value_set_enum (value, scene); + } + ret = TRUE; + break; + } + case PROP_FLASH_MODE: + { + GstFlashMode flash; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_FLASH_MODE ===="); + if (gst_camerasrc_photo_get_flash_mode (camerasrc, &flash)) { + g_value_set_enum (value, flash); + } + ret = TRUE; + break; + } + case PROP_CAPABILITIES: + { + gulong capabilities; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_CAPABILITIES ===="); + capabilities = (gulong) gst_camerasrc_photo_get_capabilities (camerasrc); + g_value_set_ulong (value, capabilities); + ret = TRUE; + break; + } + case PROP_EV_COMP: + { + gfloat ev_comp; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_EV_COMP ===="); + if (gst_camerasrc_photo_get_ev_compensation (camerasrc, &ev_comp)) { + g_value_set_float (value, ev_comp); + } + ret = TRUE; + break; + } + case PROP_ISO_SPEED: + { + guint iso_speed; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_ISO_SPEED ===="); + if (gst_camerasrc_photo_get_iso_speed (camerasrc, &iso_speed)) { + g_value_set_uint (value, iso_speed); + } + ret = TRUE; + break; + } + case PROP_APERTURE: + { + guint aperture; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_APERTURE ===="); + if (gst_camerasrc_photo_get_aperture (camerasrc, &aperture)) { + g_value_set_uint (value, aperture); + } + ret = TRUE; + break; + } + case PROP_EXPOSURE: + { + guint32 exposure; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_EXPOSURE ===="); + if (gst_camerasrc_photo_get_exposure (camerasrc, &exposure)) { + g_value_set_uint (value, exposure); + } + ret = TRUE; + break; + } + case PROP_ZOOM: + { + gfloat zoom; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_ZOOM ===="); + if (gst_camerasrc_photo_get_zoom (camerasrc, &zoom)) { + g_value_set_float (value, zoom); + } + ret = TRUE; + break; + } + case PROP_FLICKER_MODE: + { + GstFlickerReductionMode flicker_mode; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_FLICKER_MODE ===="); + if (gst_camerasrc_photo_get_flicker_mode (camerasrc, &flicker_mode)) { + g_value_set_enum (value, flicker_mode); + } + ret = TRUE; + break; + } + case PROP_FOCUS_MODE: + { + GstFocusMode focus_mode; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_FOCUS_MODE ===="); + if (gst_camerasrc_photo_get_focus_mode (camerasrc, &focus_mode)) { + g_value_set_enum (value, focus_mode); + } + ret = TRUE; + break; + } + case PROP_IMAGE_CAPTURE_CAPS: + { + GstCaps *caps; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_IMAGE_CAPTURE_CAPS ===="); + caps = bclass->get_supported_caps (camerasrc, + 
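+/* Illustrative only: from the application side this case is reached with a
+ * plain g_object_get() on the photography interface's supported-caps
+ * property.  The GST_PHOTOGRAPHY_PROP_IMAGE_CAPTURE_SUPPORTED_CAPS define is
+ * assumed to come from the photography interface headers; src stands for an
+ * instance of a GstCameraSrc subclass.
+ */
+#if 0   /* sketch only */
+static void
+log_supported_capture_caps (GstElement *src)
+{
+  GstCaps *supported = NULL;
+
+  g_object_get (G_OBJECT (src),
+      GST_PHOTOGRAPHY_PROP_IMAGE_CAPTURE_SUPPORTED_CAPS, &supported, NULL);
+  if (supported != NULL) {
+    GST_INFO ("supported capture caps: %" GST_PTR_FORMAT, supported);
+    gst_caps_unref (supported);
+  }
+}
+#endif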
GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE); + if (!caps) { + caps = gst_caps_new_empty (); + } + gst_value_set_caps (value, caps); + gst_caps_unref (caps); + ret = TRUE; + break; + } + case PROP_IMAGE_PREVIEW_CAPS: + { + GstCaps *caps; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_IMAGE_PREVIEW_CAPS ===="); + caps = bclass->get_supported_caps (camerasrc, + GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW); + if (!caps) { + caps = gst_caps_new_empty (); + } + gst_value_set_caps (value, caps); + gst_caps_unref (caps); + ret = TRUE; + break; + } + case PROP_AUTOFOCUS: + { + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_AUTOFOCUS ===="); + g_mutex_lock (camerasrc->af_lock); + if (camerasrc->photo_capture_phase == GST_CAMERA_AUTOFOCUS || + camerasrc->requested_af_mode == AF_ON_REQUESTED) + { + g_value_set_boolean (value, TRUE); + } else { + g_value_set_boolean (value, FALSE); + } + g_mutex_unlock (camerasrc->af_lock); + + ret = TRUE; + break; + } + case PROP_NOISE_REDUCTION: + { + GstPhotographyNoiseReduction noise_reduction; + GST_DEBUG_OBJECT (camerasrc, "==== GETTING PROP_NOISE_REDUCTION ===="); + if (gst_camerasrc_photo_get_noise_reduction (camerasrc, &noise_reduction)) { + g_value_set_flags (value, noise_reduction); + } + ret = TRUE; + break; + } + default: + break; + } + + return ret; +} + + +/* + * + */ +gboolean +gst_camerasrc_photo_set_property (GstCameraSrc *camerasrc, + guint prop_id, const GValue * value) +{ + gboolean ret = FALSE; + + switch (prop_id) { + case PROP_WB_MODE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_WB_MODE ===="); + gst_camerasrc_photo_set_wb_mode (camerasrc, g_value_get_enum (value)); + ret = TRUE; + break; + case PROP_COLOUR_TONE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_COLOUR_TONE ===="); + gst_camerasrc_photo_set_tone_mode (camerasrc, g_value_get_enum (value)); + ret = TRUE; + break; + case PROP_SCENE_MODE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_SCENE_MODE ===="); + gst_camerasrc_photo_set_scene_mode (camerasrc, g_value_get_enum (value)); + ret = TRUE; + break; + case PROP_FLASH_MODE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_FLASH_MODE ===="); + gst_camerasrc_photo_set_flash_mode (camerasrc, g_value_get_enum (value)); + ret = TRUE; + break; + case PROP_EV_COMP: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_EV_COMP ===="); + gst_camerasrc_photo_set_ev_compensation (camerasrc, g_value_get_float (value)); + ret = TRUE; + break; + case PROP_ISO_SPEED: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_ISO_SPEED ===="); + gst_camerasrc_photo_set_iso_speed (camerasrc, g_value_get_uint (value)); + ret = TRUE; + break; + case PROP_APERTURE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_APERTURE ===="); + gst_camerasrc_photo_set_aperture (camerasrc, g_value_get_uint (value)); + ret = TRUE; + break; + case PROP_EXPOSURE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_EXPOSURE ===="); + gst_camerasrc_photo_set_exposure (camerasrc, g_value_get_uint (value)); + ret = TRUE; + break; + case PROP_ZOOM: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_ZOOM ===="); + gst_camerasrc_photo_set_zoom (camerasrc, g_value_get_float (value)); + ret = TRUE; + break; + case PROP_FLICKER_MODE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_FLICKER_MODE ===="); + gst_camerasrc_photo_set_flicker_mode (camerasrc, + g_value_get_enum (value)); + ret = TRUE; + break; + case PROP_FOCUS_MODE: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_FOCUS_MODE ===="); + gst_camerasrc_photo_set_focus_mode (camerasrc, g_value_get_enum (value)); + ret = 
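+/* Illustrative only: applications typically do not set these GObject
+ * properties directly but use the GstPhotography convenience calls, which
+ * end up in the property setters above.  The calls shown are from the 0.10
+ * photography API; src is assumed to be a GstCameraSrc subclass instance.
+ */
+#if 0   /* sketch only */
+GstPhotography *photo = GST_PHOTOGRAPHY (src);
+
+gst_photography_set_ev_compensation (photo, 0.5);   /* +0.5 EV */
+gst_photography_set_scene_mode (photo, GST_PHOTOGRAPHY_SCENE_MODE_NIGHT);
+gst_photography_set_zoom (photo, 2.0);   /* scale defined by the element */
+#endif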
TRUE; + break; + case PROP_NOISE_REDUCTION: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_NOISE_REDUCTION ===="); + gst_camerasrc_photo_set_noise_reduction (camerasrc, + g_value_get_flags (value)); + ret = TRUE; + break; + case PROP_AUTOFOCUS: + GST_DEBUG_OBJECT (camerasrc, "==== SETTING PROP_AUTOFOCUS ===="); + gst_camerasrc_photo_set_autofocus (camerasrc, + g_value_get_boolean (value)); + ret = TRUE; + break; + default: + break; + } + + return ret; +} + + +/*** Internal API ***/ + +gboolean +gst_camerasrc_photo_parse_capture_caps (GstCameraSrc * camerasrc, + GstCaps * op_mode_caps) +{ + GstCameraSrcClass *bclass; + + gboolean ret = TRUE; + GstStructure *cstr; + guint32 fcc_format; + gint tmp_fps_n = 0; + gint tmp_fps_d = 0; + gint tmp_w = 0; + gint tmp_h = 0; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "Parsing image capture caps"); + + if (op_mode_caps == NULL) { + camerasrc->capture_resolution_set = FALSE; + GST_DEBUG_OBJECT (camerasrc, "NULL caps received for image capture"); + goto done; + } + + cstr = gst_caps_get_structure (op_mode_caps, 0); + + /* FIXME: Use VF format if fourcc is not given */ + /* FIXME: Don't require FPS here */ + if (gst_structure_get_fourcc (cstr, "format", &fcc_format) && + gst_structure_get_int (cstr, "width", &tmp_w) && + gst_structure_get_int (cstr, "height", &tmp_h) && + gst_structure_get_fraction (cstr, "framerate", &tmp_fps_n, &tmp_fps_d)) { + /* check if the requested fourcc format is supported */ + GstCaps *s_caps = NULL; + + /* get camsrc sourcepad caps, because the pad has + * caps-template including supported formats */ + s_caps = gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD + (camerasrc))); + + /* if intersection is empty -> provided caps not supported */ + if (!gst_caps_can_intersect (op_mode_caps, s_caps)) { + GST_WARNING_OBJECT (camerasrc, + "Unsupported fourcc format provided by caller"); + ret = FALSE; + } + + /* Check that the requested resolution is not the same as what is + * currently configured to be in use. If so, no need to set it again. + */ + if ((camerasrc->capture_w != tmp_w || + camerasrc->capture_h != tmp_h || + camerasrc->capture_fourcc != fcc_format) && ret == TRUE) { + GST_DEBUG_OBJECT (camerasrc, + "set width: %d , height: %d , fps_n: %d , fps_d :%d , format: %" + GST_FOURCC_FORMAT, tmp_w, tmp_h, tmp_fps_n, tmp_fps_d, + GST_FOURCC_ARGS (fcc_format)); + + ret = bclass->set_capture (camerasrc, + GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE, FALSE, &fcc_format, + (guint *) & tmp_w, (guint *) & tmp_h, NULL, NULL); + + if (ret) { + camerasrc->capture_w = tmp_w; + camerasrc->capture_h = tmp_h; + camerasrc->capture_fps_n = tmp_fps_n; + camerasrc->capture_fps_d = tmp_fps_d; + camerasrc->capture_fourcc = fcc_format; + camerasrc->capture_resolution_set = TRUE; + + /* Write the assigned values back to the caps structure if possible */ + if (GST_CAPS_REFCOUNT_VALUE (op_mode_caps) == 1) { + gst_caps_set_simple (op_mode_caps, + "width", G_TYPE_INT, camerasrc->capture_w, + "height", G_TYPE_INT, camerasrc->capture_h, + "format", GST_TYPE_FOURCC, camerasrc->capture_fourcc, NULL); + } + + /* It may not be possible to create the preview with previously + * given resolution. 
Therefore we cancel the preview creation */ + gst_camerasrc_photo_parse_preview_caps (camerasrc, NULL); + + /* Notify that supported preview caps may have changed */ + g_object_notify (G_OBJECT (camerasrc), + GST_PHOTOGRAPHY_PROP_IMAGE_PREVIEW_SUPPORTED_CAPS); + } + } else { + GST_DEBUG_OBJECT (camerasrc, "format not set"); + ret = FALSE; + } + if (s_caps) + gst_caps_unref (s_caps); + + } else { + GST_DEBUG_OBJECT (camerasrc, "Unable to parse given caps"); + ret = FALSE; + } + +done: + + return ret; +} + + +gboolean +gst_camerasrc_photo_parse_preview_caps (GstCameraSrc *camerasrc, + GstCaps *op_mode_caps) +{ + GstCameraSrcClass *bclass; + gboolean ret = FALSE; + guint32 fourcc = 0; + gint tmp_w = 0; + gint tmp_h = 0; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (op_mode_caps == NULL) { + /* Setting NULL caps means canceling the preview image creation process. + * In this case resolution 0x0 will be given to subclass */ + GST_DEBUG_OBJECT (camerasrc, "NULL caps received for preview image"); + ret = TRUE; + } + else { + GstStructure *cstr; + + GST_DEBUG_OBJECT (camerasrc, "parsing preview caps"); + + cstr = gst_caps_get_structure (op_mode_caps, 0); + + /* FIXME: Use VF format if fourcc is not given */ + if (gst_structure_get_fourcc (cstr, "format", &fourcc) && + gst_structure_get_int (cstr, "width", &tmp_w) && + gst_structure_get_int (cstr, "height", &tmp_h)) + { +#if 0 + /* check if the requested fourcc format is supported */ + GstCaps * s_caps = NULL; + GstCaps * r_caps = NULL; + + /* get camsrc sourcepad caps, because the pad has + * caps-template including supported formats */ + s_caps = gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD + (camerasrc))); + /* take an intersection between sourcepad caps and provided caps */ + r_caps = gst_caps_intersect (op_mode_caps, s_caps); + + /* if EMPTY or NULL => provided caps not supported as not set in sourcepad */ + if (r_caps == NULL || gst_caps_is_empty (r_caps)) { + GST_WARNING_OBJECT (camerasrc, + "Unsupported fourcc format provided by caller"); + ret = FALSE; + } + if (s_caps) + gst_caps_unref (s_caps); + if (r_caps) + gst_caps_unref (r_caps); +#endif + ret = TRUE; + } + else { + GST_DEBUG_OBJECT (camerasrc, "Unable to parse given caps"); + ret = FALSE; + } + } + + /* Check that the requested resolution is not the same as what is + * currently configured to be in use. If so, no need to set it again. 
+ */ + if (ret && (camerasrc->preview_w != tmp_w || camerasrc->preview_h != tmp_h || + camerasrc->preview_fourcc != fourcc)) + { + GST_DEBUG_OBJECT (camerasrc, + "set preview width: %d, height: %d, format: %" + GST_FOURCC_FORMAT, tmp_w, tmp_h, GST_FOURCC_ARGS (fourcc)); + + ret = bclass->set_capture (camerasrc, + GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW, FALSE, + &fourcc, (guint *) &tmp_w, (guint *) &tmp_h, NULL, NULL); + + if (fourcc == 0) { + /* Special case: preview image creation is canceled (NULL caps)*/ + camerasrc->preview_resolution_set = FALSE; + ret = TRUE; + } + else { + if (ret && op_mode_caps && GST_CAPS_REFCOUNT_VALUE (op_mode_caps) == 1) { + /* Write the assigned values back to the caps structure */ + gst_caps_set_simple (op_mode_caps, + "width", G_TYPE_INT, tmp_w, + "height", G_TYPE_INT, tmp_h, + "format", GST_TYPE_FOURCC, fourcc, + NULL); + } + + GST_DEBUG_OBJECT (camerasrc, + "selected preview width: %d, height: %d, format: %" + GST_FOURCC_FORMAT, tmp_w, tmp_h, GST_FOURCC_ARGS (fourcc)); + + camerasrc->preview_resolution_set = ret; + } + } else { + GST_DEBUG_OBJECT (camerasrc, "format not set"); + ret = FALSE; + } + + if (ret) { + camerasrc->preview_w = tmp_w; + camerasrc->preview_h = tmp_h; + camerasrc->preview_fourcc = fourcc; + } + + return ret; +} + + + + + diff --git a/gst-libs/gst/camera/gstmfldcameraphotoiface.h b/gst-libs/gst/camera/gstmfldcameraphotoiface.h new file mode 100644 index 0000000..58a7516 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcameraphotoiface.h @@ -0,0 +1,338 @@ +/* GStreamer + * + * Copyright (C) 2008-2010 Nokia Corporation + * Copyright (C) 2010 Intel Corporation + * + * gstcameraphotoiface.h: Photo interface implementation for camerasrc + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef __GST_CAMERA_SRC_PHOTOGRAPHY_H__ +#define __GST_CAMERA_SRC_PHOTOGRAPHY_H__ + +#include +#include + +#include "gstmfldcamerasrc.h" + +G_BEGIN_DECLS + +typedef enum { + PROP_0, + PROP_ALWAYS_COPY, + PROP_MAKER_NOTE, + PROP_CAPTURE_MODE, + PROP_VIEWFINDER_MODE, + PROP_DRIVER_NAME, + PROP_WB_MODE, + PROP_COLOUR_TONE, + PROP_SCENE_MODE, + PROP_FLASH_MODE, + PROP_CAPABILITIES, + PROP_EV_COMP, + PROP_ISO_SPEED, + PROP_APERTURE, + PROP_EXPOSURE, + PROP_ZOOM, + PROP_FLICKER_MODE, + PROP_FOCUS_MODE, + PROP_IMAGE_CAPTURE_CAPS, + PROP_IMAGE_PREVIEW_CAPS, + PROP_AUTOFOCUS, + PROP_NOISE_REDUCTION, + PROP_CAPTURE_WIDTH, + PROP_CAPTURE_HEIGHT, + PROP_CAPTURE_FOURCC, + PROP_CAPTURE_COUNT, + PROP_REQ_NEGOTIATION, + PROP_SIGNAL_STILL_CAPTURE +} GstCameraSrcProperties; + + +gboolean +gst_camerasrc_photo_set_ev_compensation (GstCameraSrc * camerasrc, + gfloat ev_comp); + +gboolean +gst_camerasrc_photo_get_ev_compensation (GstCameraSrc * camerasrc, + gfloat *ev_comp); + +gboolean +gst_camerasrc_photo_set_iso_speed (GstCameraSrc * camerasrc, + guint iso_speed); + +gboolean +gst_camerasrc_photo_get_iso_speed (GstCameraSrc * camerasrc, + guint *iso_speed); + +gboolean +gst_camerasrc_photo_set_aperture (GstCameraSrc * camerasrc, + guint aperture); + +gboolean +gst_camerasrc_photo_get_aperture (GstCameraSrc * camerasrc, + guint *aperture); + +gboolean +gst_camerasrc_photo_set_exposure (GstCameraSrc * camerasrc, + guint32 exposure); + +gboolean +gst_camerasrc_photo_get_exposure (GstCameraSrc * camerasrc, + guint32 *exposure); + +gboolean +gst_camerasrc_photo_set_wb_mode (GstCameraSrc * camerasrc, + GstWhiteBalanceMode mode); + +gboolean +gst_camerasrc_photo_get_wb_mode (GstCameraSrc * camerasrc, + GstWhiteBalanceMode *mode); + +gboolean +gst_camerasrc_photo_set_tone_mode (GstCameraSrc * camerasrc, + GstColourToneMode mode); + +gboolean +gst_camerasrc_photo_get_tone_mode (GstCameraSrc * camerasrc, + GstColourToneMode *mode); + +gboolean +gst_camerasrc_photo_set_scene_mode (GstCameraSrc * camerasrc, + GstSceneMode mode); + +gboolean +gst_camerasrc_photo_get_scene_mode (GstCameraSrc * camerasrc, + GstSceneMode *mode); + +gboolean +gst_camerasrc_photo_set_flash_mode (GstCameraSrc * camerasrc, + GstFlashMode mode); + +gboolean +gst_camerasrc_photo_get_flash_mode (GstCameraSrc * camerasrc, + GstFlashMode *mode); + +gboolean +gst_camerasrc_photo_set_zoom (GstCameraSrc * camerasrc, + gfloat zoom); + +gboolean +gst_camerasrc_photo_get_zoom (GstCameraSrc * camerasrc, + gfloat *zoom); + +gboolean +gst_camerasrc_photo_set_flicker_mode (GstCameraSrc * camerasrc, + GstFlickerReductionMode mode); + +gboolean +gst_camerasrc_photo_get_flicker_mode (GstCameraSrc * camerasrc, + GstFlickerReductionMode *mode); + +gboolean +gst_camerasrc_photo_set_focus_mode (GstCameraSrc * camerasrc, + GstFocusMode mode); +gboolean +gst_camerasrc_photo_get_focus_mode (GstCameraSrc * camerasrc, + GstFocusMode *mode); + +gboolean +gst_camerasrc_photo_set_noise_reduction (GstCameraSrc * camerasrc, + GstPhotographyNoiseReduction mode); +gboolean +gst_camerasrc_photo_get_noise_reduction (GstCameraSrc * camerasrc, + GstPhotographyNoiseReduction *mode); + +void +gst_camerasrc_photo_set_autofocus (GstCameraSrc * camerasrc, + gboolean on); + +gboolean +gst_camerasrc_photo_prepare_for_capture (GstCameraSrc * camerasrc, + GstPhotoCapturePrepared func, + GstCaps *capture_caps, + gpointer user_data); + +GstPhotoCaps +gst_camerasrc_photo_get_capabilities (GstCameraSrc * camerasrc); + +void +gst_camerasrc_photo_ready_for_capture (GstCameraSrc 
*camerasrc, + GstCaps *selected); + +gboolean +gst_camerasrc_photo_set_config (GstCameraSrc * camerasrc, + GstPhotoSettings * config); + +gboolean +gst_camerasrc_photo_get_config (GstCameraSrc * camerasrc, + GstPhotoSettings * config); + +gboolean +gst_camerasrc_photo_set_format (GstCameraSrc * camerasrc, + GstOperationMode op_mode, + GstCaps * op_mode_caps); +GstCaps * +gst_camerasrc_photo_get_format (GstCameraSrc * camerasrc, + GstOperationMode op_mode); + +gboolean +gst_camerasrc_photo_set_property (GstCameraSrc *camerasrc, + guint prop_id, + const GValue * value); + +gboolean +gst_camerasrc_photo_get_property (GstCameraSrc *camerasrc, + guint prop_id, + GValue * value); + +gboolean +gst_camerasrc_photo_parse_preview_caps (GstCameraSrc *camerasrc, + GstCaps *op_mode_caps); + +#define GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, function_name, param_type) \ + \ +gboolean \ +interface_as_function ## _photo_set_ ## function_name (GstPhotography * photo, \ + param_type param) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_set_ ## function_name (this, param); \ +} \ + \ +gboolean \ +interface_as_function ## _photo_get_ ## function_name (GstPhotography * photo, \ + param_type * param) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_get_ ## function_name (this, param); \ +} + + +#define GST_IMPLEMENT_CAMERA_SRC_PHOTO_METHODS(Type, interface_as_function) \ + \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, ev_compensation, gfloat) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, iso_speed, guint) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, aperture, guint) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, exposure, guint32) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, wb_mode, GstWhiteBalanceMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, tone_mode, GstColourToneMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, scene_mode, GstSceneMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, flash_mode, GstFlashMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, zoom, gfloat) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, flicker_mode, GstFlickerReductionMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, focus_mode, GstFocusMode) \ +GST_CAMERA_SRC_PHOTO_FUNCS(Type, interface_as_function, noise_reduction, GstPhotographyNoiseReduction) \ + \ +GstPhotoCaps \ +interface_as_function ## _photo_get_capabilities (GstPhotography * photo) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_get_capabilities (this); \ +} \ + \ +gboolean \ +interface_as_function ## _photo_prepare_for_capture (GstPhotography * photo, \ + GstPhotoCapturePrepared func, \ + GstCaps *capture_caps, \ + gpointer user_data) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_prepare_for_capture (this, func, \ + capture_caps, user_data); \ +} \ + \ +void \ +interface_as_function ## _photo_set_autofocus (GstPhotography * photo, \ + gboolean on) \ +{ \ + Type *this = (Type*) photo; \ + gst_camerasrc_photo_set_autofocus (this, on); \ +} \ + \ +gboolean \ +interface_as_function ## _photo_set_config (GstPhotography * photo, \ + GstPhotoSettings * config) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_set_config (this, config); \ +} \ + \ +gboolean \ +interface_as_function ## _photo_get_config (GstPhotography * photo, \ + GstPhotoSettings * config) \ +{ \ + Type *this = (Type*) photo; \ + return 
gst_camerasrc_photo_get_config (this, config); \ +} \ + \ +gboolean \ +interface_as_function ## _photo_set_format (GstPhotography * photo, \ + GstOperationMode op_mode, \ + GstCaps * op_mode_caps) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_set_format (this, op_mode, op_mode_caps); \ +} \ + \ +GstCaps * \ +interface_as_function ## _photo_get_format (GstPhotography * photo, \ + GstOperationMode op_mode) \ +{ \ + Type *this = (Type*) photo; \ + return gst_camerasrc_photo_get_format (this, op_mode); \ +} \ + \ +void \ +interface_as_function ## _photo_interface_init (GstPhotographyInterface * iface) \ +{ \ + iface->set_ev_compensation = interface_as_function ## _photo_set_ev_compensation; \ + iface->get_ev_compensation = interface_as_function ## _photo_get_ev_compensation; \ + iface->set_iso_speed = interface_as_function ## _photo_set_iso_speed; \ + iface->get_iso_speed = interface_as_function ## _photo_get_iso_speed; \ + iface->set_aperture = interface_as_function ## _photo_set_aperture; \ + iface->get_aperture = interface_as_function ## _photo_get_aperture; \ + iface->set_exposure = interface_as_function ## _photo_set_exposure; \ + iface->get_exposure = interface_as_function ## _photo_get_exposure; \ + iface->set_white_balance_mode = interface_as_function ## _photo_set_wb_mode; \ + iface->get_white_balance_mode = interface_as_function ## _photo_get_wb_mode; \ + iface->set_colour_tone_mode = interface_as_function ## _photo_set_tone_mode; \ + iface->get_colour_tone_mode = interface_as_function ## _photo_get_tone_mode; \ + iface->set_scene_mode = interface_as_function ## _photo_set_scene_mode; \ + iface->get_scene_mode = interface_as_function ## _photo_get_scene_mode; \ + iface->set_flash_mode = interface_as_function ## _photo_set_flash_mode; \ + iface->get_flash_mode = interface_as_function ## _photo_get_flash_mode; \ + iface->set_zoom = interface_as_function ## _photo_set_zoom; \ + iface->get_zoom = interface_as_function ## _photo_get_zoom; \ + iface->set_flicker_mode = interface_as_function ## _photo_set_flicker_mode; \ + iface->get_flicker_mode = interface_as_function ## _photo_get_flicker_mode; \ + iface->set_focus_mode = interface_as_function ## _photo_set_focus_mode; \ + iface->get_focus_mode = interface_as_function ## _photo_get_focus_mode; \ + iface->set_config = interface_as_function ## _photo_set_config; \ + iface->get_config = interface_as_function ## _photo_get_config; \ + iface->set_format = interface_as_function ## _photo_set_format; \ + iface->get_format = interface_as_function ## _photo_get_format; \ + iface->set_noise_reduction = interface_as_function ## _photo_set_noise_reduction; \ + iface->get_noise_reduction = interface_as_function ## _photo_get_noise_reduction; \ + \ + iface->get_capabilities = interface_as_function ## _photo_get_capabilities; \ + iface->prepare_for_capture = interface_as_function ## _photo_prepare_for_capture; \ + iface->set_autofocus = interface_as_function ## _photo_set_autofocus; \ +} \ + + +#endif /* __GST_CAMERA_SRC_PHOTOGRAPHY_H__ */ diff --git a/gst-libs/gst/camera/gstmfldcamerasrc.c b/gst-libs/gst/camera/gstmfldcamerasrc.c new file mode 100644 index 0000000..c34cf60 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcamerasrc.c @@ -0,0 +1,2671 @@ +/* GStreamer + * + * Copyright (C) 2001-2002 Ronald Bultje + * 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * 2010 Intel Corporation + * + * gstcamerasrc.c: Abstract camera base class + * + * This library is free software; you can redistribute it and/or + * modify it under the 
terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:gstcamerasrc + * @short_description: Base class for camera sources + * + * + * camerasrc is a generic base class for video source elements. + * + * + * Capturing modes + * + * GstCameraSrc provides three different operating modes: VIEWFINDER, IMAGE and + * VIDEO capturing. This mode is selected by using "capture-mode" property. + * Viewfinder mode is used for capturing preview frames before performing the + * high-quality image capture, or before starting video recording. Image and + * video modes should be switched just before starting the corresponding + * action. GstCameraSrc itself doesn't make any difference between these modes, + * and this setting is mainly targeted for subclasses (see below). + * + * + * + * Image capture + * + * In order to capture and image of higher quality than negotiated in element's + * source pad, #GstPhotography's prepare_for_capture() must be called. It + * Takes a #GstCaps object as a parameter. These caps will define the desired + * high-quality capture resolution. However, this resolution is just a + * suggestion; GstCameraSrc uses the given #GstPhotoCapturePrepared callback + * to inform the selected capturing resolution back to the application. During + * this callback application also has responsibility to adjust image pipeline + * capsfilters according to this resolution. After HQ image is captured and + * pushed, GstCameraSrc switches automatically back to old viewfinder resolution. + * + * + * + * Video capture + * + * By default GstCameraSrc works just like v4l2src, so it can be used for normal + * video stream capturing as well. There is no separate GstPhotography API + * function for starting video capturing (in contrary to image capturing), but + * the notification is just given by setting the #GstCameraSrc:capture-mode + * to video. + * + * + * + * Messages + * + * During capturing process GstCameraSrc sends a bunch of GstMessages to bus: + * + * + * + * GST_PHOTOGRAPHY_AUTOFOCUS_DONE is sent when application has started + * autofocus operation and it finishes. This message contains following fields: + * "status" is an integer which contains GstFocusStatus enumeration value. + * It tells whether the focusing succeeded or failed. + * "focus-window-rows" and "focus-window-columns" are integers and they tell + * the focus matrix size. + * "focus-windows" is a GArray containing a list of integers. They define + * window numbers where the focus is. Window numbering starts from one, which + * is top-left window, increasing from left-to-right, top-to-bottom order. This + * field is only valid when focusing status is SUCCESS. + * "focus-window-coverage" field is an integer and it tells the percentage how + * big area from wiewfinder window was used for autofocusing. + * + * + * + * + * GST_PHOTOGRAPHY_SHAKE_RISK is sent during autofocusing process. 
It keeps + * application updated about the possible risk for shaken image. Sending of + * this message is done in subclasses, so it is implementation-specific and + * may not be always sent. + * + * + * + * + * "photo-capture-start" is sent just before the actual high-quality capture + * operation is about to happen. + * + * + * + * + * "photo-capture-end" is sent immediately after high-quality image has been + * captured. This can be used for example for playing the shutter sound + * in sync with the capture operation. + * + * + * + * + * "caf-update" messages are sent when camera subsystem supports continuous + * autofocusing (CAF). The message contains information about CAF status. It + * has one G_TYPE_INT field called "status" and its content is one of the + * values defined in GstPhotography API's GstFocusStatus enumeration. + * + * + * + * + * + * + * Supported formats + * + * Currently GstCameraSrc only supports UYVY and YUY2 color formats. + * + * + * + * Subclasses + * + * Hardware-specific imaging functionalities are implemented in subclasses. + * GstCameraSrc provides vmethods for this purpose. Subclasses tasks are to + * are handle e.g. autofocusing, flash, image capturing, + * post-processing and all other non-V4L2 standard operations. Subclass + * can also implement the API only partially; by default GstCameraSrc uses dummy + * functions for all vmethods that are not implemented by the subclass. + * + * + */ + +#ifdef HAVE_CONFIG_H +#include +#endif + +#include +#include +#include + +#include + +#include "gstmfldcameracolorbalance.h" +#include "gstmfldcameraphotoiface.h" +#include "gstmfldcameracontroliface.h" +#include + + +GST_DEBUG_CATEGORY (gst_camerasrc_debug); +#define GST_CAT_DEFAULT gst_camerasrc_debug + + +#define DEFAULT_PROP_ALWAYS_COPY FALSE + +#define MIN_ZOOM 0.0 +#define MAX_ZOOM 1000.0 +#define ZOOM_1X 100.0 + +GST_IMPLEMENT_CAMERA_SRC_COLOR_BALANCE_METHODS (GstCameraSrc, gst_camsrc); +GST_IMPLEMENT_CAMERA_SRC_PHOTO_METHODS (GstCameraSrc, gst_camsrc); +GST_IMPLEMENT_CAMERA_SRC_CAMERA_CONTROL_METHODS (GstCameraSrc, gst_camsrc); + + +/* Enumerations */ +enum { + /*signal*/ + SIGNAL_STILL_CAPTURE, + SIGNAL_NEGO_COMPLETE, + LAST_SIGNAL +}; + +static guint gst_camerasrc_signals[LAST_SIGNAL] = { 0 }; + +static gboolean +gst_camerasrc_iface_supported (GstImplementsInterface * iface, GType iface_type) +{ + GstCameraSrc *camsrc; + GstCameraSrcClass *bclass; + + camsrc = GST_CAMERA_SRC (iface); + bclass = GST_CAMERA_SRC_GET_CLASS (camsrc); + + g_assert (iface_type == GST_TYPE_PHOTOGRAPHY || + iface_type == GST_TYPE_COLOR_BALANCE || iface_type == GST_TYPE_CAMERA_CONTROL ); + + /* We implement GstPhotography in NULL state as well */ + if (iface_type == GST_TYPE_PHOTOGRAPHY) + return TRUE; + + else if (!bclass->is_open (camsrc)) + return FALSE; + + return TRUE; +} + +static void +gst_camerasrc_interface_init (GstImplementsInterfaceClass * klass) +{ + /* + * default virtual functions + */ + klass->supported = gst_camerasrc_iface_supported; +} + +void +gst_camerasrc_init_interfaces (GType type) +{ + static const GInterfaceInfo camsrc_iface_info = { + (GInterfaceInitFunc) gst_camerasrc_interface_init, + NULL, + NULL, + }; + static const GInterfaceInfo camsrc_photo_info = { + (GInterfaceInitFunc) gst_camsrc_photo_interface_init, + NULL, + NULL, + }; + static const GInterfaceInfo camsrc_colorbalance_info = { + (GInterfaceInitFunc) gst_camsrc_color_balance_interface_init, + NULL, + NULL, + }; + + static const GInterfaceInfo camsrc_cameracontrol_info = { + (GInterfaceInitFunc) 
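+/* Illustrative only: the bus messages described in the section documentation
+ * above can be handled roughly as below.  The structure names are taken from
+ * that documentation; installing the bus watch and the user data handling
+ * are left out.
+ */
+#if 0   /* sketch only */
+static gboolean
+camera_bus_cb (GstBus *bus, GstMessage *msg, gpointer user_data)
+{
+  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
+    const GstStructure *s = gst_message_get_structure (msg);
+
+    if (gst_structure_has_name (s, "photo-capture-start")) {
+      /* e.g. play the shutter sound in sync with the capture */
+    } else if (gst_structure_has_name (s, "photo-capture-end")) {
+      /* high-quality image has been captured and pushed */
+    } else if (gst_structure_has_name (s, GST_PHOTOGRAPHY_AUTOFOCUS_DONE)) {
+      gint status = 0;
+      gst_structure_get_int (s, "status", &status);   /* GstFocusStatus */
+    } else if (gst_structure_has_name (s, "caf-update")) {
+      gint status = 0;
+      gst_structure_get_int (s, "status", &status);
+    }
+  }
+  return TRUE;
+}
+#endif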
gst_camsrc_camera_control_interface_init, + NULL, + NULL, + }; + + g_type_add_interface_static (type, + GST_TYPE_IMPLEMENTS_INTERFACE, &camsrc_iface_info); + g_type_add_interface_static (type, GST_TYPE_PHOTOGRAPHY, &camsrc_photo_info); + g_type_add_interface_static (type, + GST_TYPE_COLOR_BALANCE, &camsrc_colorbalance_info); + g_type_add_interface_static (type, + GST_TYPE_CAMERA_CONTROL, &camsrc_cameracontrol_info); +} + + +GType +gst_camerasrc_capture_mode_get_type (void) +{ + static GType gst_camerasrc_capture_mode_type = 0; + static GEnumValue gst_camerasrc_capture_modes[] = { + {GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER, "Viewfinder mode", "viewfinder"}, + {GST_CAMERA_SRC_CAPTURE_MODE_STILL, "Still image capture mode", "still"}, + {GST_CAMERA_SRC_CAPTURE_MODE_VIDEO, "Video capturing mode", "video"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camerasrc_capture_mode_type)) { + gst_camerasrc_capture_mode_type = + g_enum_register_static ("GstCameraCaptureMode", + gst_camerasrc_capture_modes); + } + return gst_camerasrc_capture_mode_type; +} + +GType +gst_camerasrc_viewfinder_mode_get_type (void) +{ + static GType gst_camerasrc_viewfinder_mode_type = 0; + static GEnumValue gst_camerasrc_viewfinder_modes[] = { + {GST_CAMERA_SRC_VIEWFINDER_MODE_STILL, "Still image capture mode", "still"}, + {GST_CAMERA_SRC_VIEWFINDER_MODE_VIDEO, "Video capturing mode", "video"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camerasrc_viewfinder_mode_type)) { + gst_camerasrc_viewfinder_mode_type = + g_enum_register_static ("GstCameraViewfinderMode", + gst_camerasrc_viewfinder_modes); + } + return gst_camerasrc_viewfinder_mode_type; +} + +GST_BOILERPLATE_FULL (GstCameraSrc, gst_camerasrc, GstPushSrc, + GST_TYPE_PUSH_SRC, gst_camerasrc_init_interfaces); + +static void gst_camerasrc_dispose (GObject * object); + +/* element methods */ +static GstStateChangeReturn +gst_camerasrc_change_state (GstElement * element, GstStateChange transition); + +/* basesrc methods */ +static gboolean gst_camerasrc_start (GstBaseSrc * src); + +static gboolean gst_camerasrc_unlock (GstBaseSrc * src); + +static gboolean gst_camerasrc_unlock_stop (GstBaseSrc * src); + +static gboolean gst_camerasrc_stop (GstBaseSrc * src); + +static gboolean gst_camerasrc_set_caps (GstBaseSrc * src, GstCaps * caps); + +static GstCaps *gst_camerasrc_get_caps (GstBaseSrc * src); + +static gboolean gst_camerasrc_query (GstBaseSrc * bsrc, GstQuery * query); + +static GstFlowReturn gst_camerasrc_create (GstPushSrc * src, GstBuffer ** out); + +static void gst_camerasrc_fixate (GstBaseSrc * basesrc, GstCaps * caps); + +static gboolean gst_camerasrc_negotiate (GstBaseSrc * basesrc); + +static void gst_camerasrc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); + +static void gst_camerasrc_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static void gst_camerasrc_override_photo_properties (GObjectClass * + gobject_class); + +static void gst_camerasrc_default_functions_init (GstCameraSrcClass * + camera_class); + +static gboolean gst_camerasrc_event (GstBaseSrc * src, GstEvent * event); + +static gboolean gst_camerasrc_send_event (GstElement * element, + GstEvent * event); + +static void gst_camerasrc_update_max_zoom (GstCameraSrc * camerasrc); + +/* + */ +static void +gst_camerasrc_base_init (gpointer g_class) +{ +// GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class); + gst_tag_register_musicbrainz_tags (); + + GST_DEBUG_CATEGORY_INIT (gst_camerasrc_debug, 
"camerasrc", 0, + "camerasrc element"); +} + + +/* VOID:OBJECT,OBJECT (generated by 'glib-genmarshal') */ +#define g_marshal_value_peek_object(v) (v)->data[0].v_pointer +void gst_camerasrc_VOID__OBJECT_OBJECT(GClosure *closure, + GValue *return_value, + guint n_param_values, + const GValue *param_values, + gpointer invocation_hint, + gpointer marshal_data) +{ + typedef void (*GMarshalFunc_VOID__OBJECT_OBJECT)(gpointer data1, + gpointer arg_1, + gpointer arg_2, + gpointer arg_3, + gpointer data2); + register GMarshalFunc_VOID__OBJECT_OBJECT callback; + register GCClosure *cc = (GCClosure*) closure; + register gpointer data1, data2; + + g_return_if_fail (n_param_values == 4); + + if (G_CCLOSURE_SWAP_DATA(closure)) { + data1 = closure->data; + data2 = g_value_peek_pointer(param_values + 0); + } else { + data1 = g_value_peek_pointer(param_values + 0); + data2 = closure->data; + } + + callback = (GMarshalFunc_VOID__OBJECT_OBJECT)(marshal_data ? marshal_data : cc->callback); + + callback(data1, + g_marshal_value_peek_object(param_values + 1), + g_marshal_value_peek_object(param_values + 2), + g_marshal_value_peek_object(param_values + 3), + data2); +} + +int gst_camerasrc_send_af_status(GstCameraSrc *camsrc , int state) +{ + GstMessage *m = NULL; + GstStructure *s = NULL; + + GST_INFO_OBJECT(camsrc, "autofocus callback: state [%d]", state); + + s = gst_structure_new("camerasrc-AF", + "focus-state", G_TYPE_INT, state, + NULL); + + m = gst_message_new_element(GST_OBJECT(camsrc), s); + gst_element_post_message(GST_ELEMENT(camsrc), m); + + return 0; +} +/* + */ +static void +gst_camerasrc_class_init (GstCameraSrcClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + GstBaseSrcClass *basesrc_class; + GstPushSrcClass *pushsrc_class; + + gobject_class = G_OBJECT_CLASS (klass); + element_class = GST_ELEMENT_CLASS (klass); + basesrc_class = GST_BASE_SRC_CLASS (klass); + pushsrc_class = GST_PUSH_SRC_CLASS (klass); + + gobject_class->dispose = gst_camerasrc_dispose; + gobject_class->set_property = gst_camerasrc_set_property; + gobject_class->get_property = gst_camerasrc_get_property; + + element_class->change_state = gst_camerasrc_change_state; + element_class->send_event = gst_camerasrc_send_event; + + g_object_class_install_property (gobject_class, PROP_ALWAYS_COPY, + g_param_spec_boolean ("always-copy", "Always Copy", + "If the buffer will or not be used directly from mmap", + DEFAULT_PROP_ALWAYS_COPY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_MAKER_NOTE, + g_param_spec_boolean ("maker-note", "Maker Note", + "Whether send the maker note with image during the capture", + FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_WIDTH, + g_param_spec_int ("capture-width", "Capture Widht", + "Image width used in capture mode.In case capture is done when pipeline is already running in viewfinder mode", 0, + G_MAXINT, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_HEIGHT, + g_param_spec_int ("capture-height", "Capture Height", + "Image height used in capture mode. 
In case capture is done when pipeline is already running in viewfinder mode", 0, + G_MAXINT, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property(gobject_class, PROP_CAPTURE_FOURCC, + g_param_spec_uint("capture-fourcc", "Capture format", + "Fourcc value for capture format",0, + G_MAXUINT, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property(gobject_class, PROP_CAPTURE_COUNT, + g_param_spec_uint("capture-count", "Capture count", + "image count taken in capture",0, + G_MAXUINT, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property(gobject_class, PROP_REQ_NEGOTIATION, + g_param_spec_boolean("req-negotiation", "Request re-negotiation", + "Request to negotiate while on playing", + FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE )); + + g_object_class_install_property(gobject_class, PROP_SIGNAL_STILL_CAPTURE, + g_param_spec_boolean("signal-still-capture", "Signal Still Capture", + "Send a signal before pushing the buffer", + FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE )); + + /** + * GstCameraSrc:capture-mode: + * + * Defines capturing mode to be used. This information is needed by extension + * modules, which control hardware-specific functionality. + */ + g_object_class_install_property (gobject_class, PROP_CAPTURE_MODE, + g_param_spec_enum ("capture-mode", "Capturing mode", + "Defines what kind of capturing mode to be used", + GST_TYPE_CAMERA_SRC_CAPTURE_MODE, + GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + /** + * GstCameraSrc:viewfinder-mode: + * + * Defines which mode viewfinder should run in. This information is needed + * by extension modules, which control hardware-specific functionality. + */ + g_object_class_install_property (gobject_class, PROP_VIEWFINDER_MODE, + g_param_spec_enum ("viewfinder-mode", "Viewfinder mode", + "Defines in what mode viewfinder should be run", + GST_TYPE_CAMERA_SRC_VIEWFINDER_MODE, + GST_CAMERA_SRC_VIEWFINDER_MODE_STILL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + + /** + * GstCameraSrc::nego-complete: + * @camerasrc: the camerasrc instance + * @start: when re-negotiation is finished. + * + */ + gst_camerasrc_signals[SIGNAL_NEGO_COMPLETE] = + g_signal_new("nego-complete", + G_TYPE_FROM_CLASS (klass), + G_SIGNAL_RUN_LAST, + G_STRUCT_OFFSET(GstCameraSrcClass, nego_complete), + NULL, + NULL, + gst_marshal_VOID__VOID, + G_TYPE_NONE, 0); + + /** + * GstCameraSrc::still-capture: + * @camerasrc: the camerasrc instance + * @buffer: the buffer that will be pushed - Main + * @buffer: the buffer that will be pushed - Thumbnail + * @buffer: the buffer that will be pushed - Screennail + * + * This signal gets emitted before sending the buffer. 
+ */ + gst_camerasrc_signals[SIGNAL_STILL_CAPTURE] = + g_signal_new("still-capture", + G_TYPE_FROM_CLASS(klass), + G_SIGNAL_RUN_LAST, + G_STRUCT_OFFSET(GstCameraSrcClass, still_capture), + NULL, + NULL, + gst_camerasrc_VOID__OBJECT_OBJECT, + G_TYPE_NONE, + 3, /* Number of parameter */ + GST_TYPE_BUFFER, /* Main image buffer */ + GST_TYPE_BUFFER, /* Thumbnail image buffer */ + GST_TYPE_BUFFER); /* Screennail image buffer */ + + + + gst_camerasrc_override_photo_properties (gobject_class); + + basesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_camerasrc_get_caps); + basesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_camerasrc_set_caps); + basesrc_class->start = GST_DEBUG_FUNCPTR (gst_camerasrc_start); + basesrc_class->unlock = GST_DEBUG_FUNCPTR (gst_camerasrc_unlock); + basesrc_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_camerasrc_unlock_stop); + basesrc_class->stop = GST_DEBUG_FUNCPTR (gst_camerasrc_stop); + basesrc_class->query = GST_DEBUG_FUNCPTR (gst_camerasrc_query); + basesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_camerasrc_fixate); + basesrc_class->negotiate = GST_DEBUG_FUNCPTR (gst_camerasrc_negotiate); + basesrc_class->event = GST_DEBUG_FUNCPTR (gst_camerasrc_event); + + pushsrc_class->create = GST_DEBUG_FUNCPTR (gst_camerasrc_create); + + /* Initialize vmethods with default implementations */ + gst_camerasrc_default_functions_init (klass); +} + +/* + */ +static void +gst_camerasrc_init (GstCameraSrc * camerasrc, GstCameraSrcClass * klass) +{ + /* number of buffers requested */ + camerasrc->always_copy = DEFAULT_PROP_ALWAYS_COPY; + + camerasrc->state_lock = g_mutex_new (); + camerasrc->af_lock = g_mutex_new (); + + gst_base_src_set_format (GST_BASE_SRC (camerasrc), GST_FORMAT_TIME); + gst_base_src_set_live (GST_BASE_SRC (camerasrc), TRUE); + + /* Photo interface */ + camerasrc->photoconf.zoom = 1.0; + camerasrc->photoconf.ev_compensation = 0.0; + camerasrc->photoconf.exposure = 0; /* 0 = auto */ + camerasrc->photoconf.aperture = 0; /* 0 = auto */ + camerasrc->photoconf.iso_speed = 0; /* 0 = auto */ + camerasrc->photoconf.wb_mode = GST_PHOTOGRAPHY_WB_MODE_AUTO; + camerasrc->photoconf.scene_mode = GST_PHOTOGRAPHY_SCENE_MODE_MANUAL; + camerasrc->photoconf.flash_mode = GST_PHOTOGRAPHY_FLASH_MODE_OFF; + camerasrc->photoconf.tone_mode = GST_PHOTOGRAPHY_COLOUR_TONE_MODE_NORMAL; + camerasrc->photoconf.flicker_mode = GST_PHOTOGRAPHY_FLICKER_REDUCTION_50HZ; + camerasrc->photoconf.focus_mode = GST_PHOTOGRAPHY_FOCUS_MODE_AUTO; + camerasrc->photoconf.noise_reduction = GST_PHOTOGRAPHY_NOISE_REDUCTION_YCC | + GST_PHOTOGRAPHY_NOISE_REDUCTION_BAYER; + + camerasrc->photo_capture_phase = GST_CAMERA_VIEWFINDER; + camerasrc->capture_mode = GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER; + camerasrc->viewfinder_mode = GST_CAMERA_SRC_VIEWFINDER_MODE_STILL; + camerasrc->capture_resolution_set = FALSE; + camerasrc->preview_resolution_set = FALSE; + camerasrc->req_negotiation = FALSE; + camerasrc->signal_still_capture = FALSE; + + camerasrc->capture_w = 0; + camerasrc->capture_h = 0; + camerasrc->capture_fps_n = 0; + camerasrc->capture_fps_d = 0; + camerasrc->capture_fourcc = 0; + camerasrc->capture_count = 1; + camerasrc->capture_counter = 1; + + camerasrc->preview_w = 0; + camerasrc->preview_h = 0; + camerasrc->preview_fourcc = 0; + + camerasrc->requested_af_mode = AF_NONE_REQUESTED; + camerasrc->maker_note = FALSE; +} + +/* + */ +static void +gst_camerasrc_dispose (GObject * object) +{ + GstCameraSrc *camerasrc = GST_CAMERA_SRC_CAST (object); + + if (camerasrc->vf_caps) { + gst_caps_unref (camerasrc->vf_caps); + 
camerasrc->vf_caps = NULL; + } + if (camerasrc->state_lock) { + g_mutex_free (camerasrc->state_lock); + camerasrc->state_lock = NULL; + } + if (camerasrc->af_lock) { + g_mutex_free (camerasrc->af_lock); + camerasrc->af_lock = NULL; + } + gst_camerasrc_clear_color_channels (camerasrc); + + gst_camerasrc_clear_cameracontrol_channels (camerasrc); + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static void +gst_camerasrc_override_photo_properties (GObjectClass * gobject_class) +{ + g_object_class_override_property (gobject_class, PROP_WB_MODE, + GST_PHOTOGRAPHY_PROP_WB_MODE); + + g_object_class_override_property (gobject_class, PROP_COLOUR_TONE, + GST_PHOTOGRAPHY_PROP_COLOUR_TONE); + + g_object_class_override_property (gobject_class, PROP_SCENE_MODE, + GST_PHOTOGRAPHY_PROP_SCENE_MODE); + + g_object_class_override_property (gobject_class, PROP_FLASH_MODE, + GST_PHOTOGRAPHY_PROP_FLASH_MODE); + + g_object_class_override_property (gobject_class, PROP_CAPABILITIES, + GST_PHOTOGRAPHY_PROP_CAPABILITIES); + + g_object_class_override_property (gobject_class, PROP_EV_COMP, + GST_PHOTOGRAPHY_PROP_EV_COMP); + + g_object_class_override_property (gobject_class, PROP_ISO_SPEED, + GST_PHOTOGRAPHY_PROP_ISO_SPEED); + + g_object_class_override_property (gobject_class, PROP_APERTURE, + GST_PHOTOGRAPHY_PROP_APERTURE); + + g_object_class_override_property (gobject_class, PROP_EXPOSURE, + GST_PHOTOGRAPHY_PROP_EXPOSURE); + + g_object_class_override_property (gobject_class, PROP_ZOOM, + GST_PHOTOGRAPHY_PROP_ZOOM); + + g_object_class_override_property (gobject_class, PROP_FLICKER_MODE, + GST_PHOTOGRAPHY_PROP_FLICKER_MODE); + + g_object_class_override_property (gobject_class, PROP_FOCUS_MODE, + GST_PHOTOGRAPHY_PROP_FOCUS_MODE); + + g_object_class_override_property (gobject_class, PROP_NOISE_REDUCTION, + GST_PHOTOGRAPHY_PROP_NOISE_REDUCTION); + + g_object_class_override_property (gobject_class, PROP_IMAGE_CAPTURE_CAPS, + GST_PHOTOGRAPHY_PROP_IMAGE_CAPTURE_SUPPORTED_CAPS); + + g_object_class_override_property (gobject_class, PROP_IMAGE_PREVIEW_CAPS, + GST_PHOTOGRAPHY_PROP_IMAGE_PREVIEW_SUPPORTED_CAPS); + + g_object_class_override_property (gobject_class, PROP_AUTOFOCUS, + GST_PHOTOGRAPHY_PROP_AUTOFOCUS); +} + + +/* + */ +static void +gst_camerasrc_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstCameraSrc *camerasrc; + GstCameraSrcClass *bclass; + + camerasrc = GST_CAMERA_SRC_CAST (object); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (gst_camerasrc_photo_set_property (camerasrc, prop_id, value)) + return; + + switch (prop_id) { + case PROP_ALWAYS_COPY: + camerasrc->always_copy = g_value_get_boolean (value); + break; + case PROP_MAKER_NOTE: + camerasrc->maker_note = g_value_get_boolean (value); + break; + case PROP_CAPTURE_MODE: + camerasrc->capture_mode = g_value_get_enum (value); + GST_INFO_OBJECT (camerasrc, "Setting capture mode: %d", + camerasrc->capture_mode); + + if (camerasrc->capture_mode == GST_CAMERA_SRC_CAPTURE_MODE_VIDEO && + bclass->is_active (camerasrc)) { + /* Stop autofocus in video mode */ + bclass->set_autofocus (camerasrc, FALSE); + /* AutoExposure must be run always in video mode */ + bclass->set_autoexposure (camerasrc, TRUE); + } + if (bclass->set_capture_mode) { + bclass->set_capture_mode (camerasrc, camerasrc->capture_mode); + } + break; + + case PROP_VIEWFINDER_MODE: + camerasrc->viewfinder_mode = g_value_get_enum (value); + GST_INFO_OBJECT (camerasrc, "Setting viewfinder mode: %d", + camerasrc->viewfinder_mode); + + if 
(bclass->set_vfinder_mode) { + bclass->set_vfinder_mode (camerasrc, camerasrc->viewfinder_mode); + } + break; + case PROP_CAPTURE_WIDTH: + camerasrc->capture_w = g_value_get_int (value); + GST_DEBUG_OBJECT (camerasrc, "Capture width %d", + camerasrc->capture_w); + /* both width & height should be set before setting flag to TRUE */ + if(camerasrc->capture_h != 0) + camerasrc->capture_resolution_set = TRUE; + break; + case PROP_CAPTURE_HEIGHT: + camerasrc->capture_h = g_value_get_int (value); + GST_DEBUG_OBJECT (camerasrc, "Capture Height: %d", + camerasrc->capture_h); + /* both width & height should be set before setting flag to TRUE */ + if(camerasrc->capture_w != 0) + camerasrc->capture_resolution_set = TRUE; + break; + case PROP_CAPTURE_FOURCC: + camerasrc->capture_fourcc = g_value_get_uint (value); + GST_DEBUG_OBJECT (camerasrc, "Capture fourcc: %d", + camerasrc->capture_fourcc); + break; + case PROP_CAPTURE_COUNT: + camerasrc->capture_count = g_value_get_uint (value); + GST_DEBUG_OBJECT (camerasrc, "Capture count: %d", + camerasrc->capture_count); + break; + case PROP_REQ_NEGOTIATION: + camerasrc->req_negotiation = g_value_get_boolean(value); + GST_DEBUG_OBJECT (camerasrc, "negotaion request %d", + camerasrc->req_negotiation); + break; + case PROP_SIGNAL_STILL_CAPTURE: + camerasrc->signal_still_capture = g_value_get_boolean(value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +/* + */ +static void +gst_camerasrc_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstCameraSrc *camerasrc = GST_CAMERA_SRC_CAST (object); + + if (gst_camerasrc_photo_get_property (camerasrc, prop_id, value)) + return; + + switch (prop_id) { + case PROP_ALWAYS_COPY: + g_value_set_boolean (value, camerasrc->always_copy); + break; + case PROP_MAKER_NOTE: + g_value_set_boolean (value, camerasrc->maker_note); + break; + case PROP_CAPTURE_MODE: + g_value_set_enum (value, camerasrc->capture_mode); + break; + case PROP_VIEWFINDER_MODE: + g_value_set_enum (value, camerasrc->viewfinder_mode); + break; + case PROP_CAPTURE_WIDTH: + g_value_set_int (value, camerasrc->capture_w); + break; + case PROP_CAPTURE_HEIGHT: + g_value_set_int (value, camerasrc->capture_h); + break; + case PROP_CAPTURE_FOURCC: + g_value_set_uint (value, camerasrc->capture_fourcc); + break; + case PROP_CAPTURE_COUNT: + g_value_set_uint (value, camerasrc->capture_count); + break; + case PROP_REQ_NEGOTIATION: + g_value_set_boolean(value, camerasrc->req_negotiation); + break; + case PROP_SIGNAL_STILL_CAPTURE: + g_value_set_boolean (value, camerasrc->signal_still_capture); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +/* this function is a bit of a last resort */ +static void +gst_camerasrc_fixate (GstBaseSrc * basesrc, GstCaps * caps) +{ + GstStructure *structure; + + gint i; + + GST_DEBUG_OBJECT (basesrc, "fixating caps %" GST_PTR_FORMAT, caps); + + for (i = 0; i < gst_caps_get_size (caps); ++i) { + const GValue *v; + + structure = gst_caps_get_structure (caps, i); + + /* FIXME such sizes? we usually fixate to something in the 320x200 + * range... 
*/ + /* We are fixating to greatest possble size (limited to GST_CAMERA_SRC_MAX_SIZE) + and the maximum framerate resolution for that size */ + gst_structure_fixate_field_nearest_int (structure, "width", + GST_CAMERA_SRC_MAX_SIZE); + gst_structure_fixate_field_nearest_int (structure, "height", + GST_CAMERA_SRC_MAX_SIZE); + gst_structure_fixate_field_nearest_fraction (structure, "framerate", + G_MAXINT, 1); + + v = gst_structure_get_value (structure, "format"); + if (v && G_VALUE_TYPE (v) != GST_TYPE_FOURCC) { + guint32 fourcc; + + g_return_if_fail (G_VALUE_TYPE (v) == GST_TYPE_LIST); + + fourcc = gst_value_get_fourcc (gst_value_list_get_value (v, 0)); + gst_structure_set (structure, "format", GST_TYPE_FOURCC, fourcc, NULL); + } + } + + GST_DEBUG_OBJECT (basesrc, "fixated caps %" GST_PTR_FORMAT, caps); +} + +/* + */ +static gboolean +gst_camerasrc_negotiate (GstBaseSrc * basesrc) +{ + GstCaps *thiscaps; + + GstCaps *caps = NULL; + + GstCaps *peercaps = NULL; + + gboolean result = FALSE; + + /* first see what is possible on our source pad */ + thiscaps = gst_pad_get_caps (GST_BASE_SRC_PAD (basesrc)); + GST_DEBUG_OBJECT (basesrc, "caps of src suppressed on DEBUG (>= 5 to see)"); + GST_LOG_OBJECT (basesrc, "caps of src: %" GST_PTR_FORMAT, thiscaps); + /* nothing or anything is allowed, we're done */ + if (thiscaps == NULL || gst_caps_is_any (thiscaps)) + goto no_nego_needed; + + /* get the peer caps */ + peercaps = gst_pad_peer_get_caps (GST_BASE_SRC_PAD (basesrc)); + GST_DEBUG_OBJECT (basesrc, "caps of peer: %" GST_PTR_FORMAT, peercaps); + if (peercaps && !gst_caps_is_any (peercaps)) { + GstCaps *icaps = NULL; + int i; + + /* Prefer the first caps we are compatible with that the peer proposed */ + for (i = 0; i < gst_caps_get_size (peercaps); i++) { + /* get intersection */ + GstCaps *ipcaps = gst_caps_copy_nth (peercaps, i); + + GST_DEBUG_OBJECT (basesrc, "peer: %" GST_PTR_FORMAT, ipcaps); + + icaps = gst_caps_intersect (thiscaps, ipcaps); + gst_caps_unref (ipcaps); + + if (!gst_caps_is_empty (icaps)) + break; + + gst_caps_unref (icaps); + icaps = NULL; + } + + GST_DEBUG_OBJECT (basesrc, "intersect: %" GST_PTR_FORMAT, icaps); + if (icaps) { + /* If there are multiple intersections pick the one with the smallest + * resolution strictly bigger then the first peer caps */ + if (gst_caps_get_size (icaps) > 1) { + GstStructure *s = gst_caps_get_structure (peercaps, 0); + + int best = 0; + + int twidth, theight; + + int width = G_MAXINT, height = G_MAXINT; + + if (gst_structure_get_int (s, "width", &twidth) + && gst_structure_get_int (s, "height", &theight)) { + + /* Walk the structure backwards to get the first entry of the + * smallest resolution bigger (or equal to) the preferred resolution) + */ + for (i = gst_caps_get_size (icaps) - 1; i >= 0; i--) { + GstStructure *is = gst_caps_get_structure (icaps, i); + + int w, h; + + if (gst_structure_get_int (is, "width", &w) + && gst_structure_get_int (is, "height", &h)) { + if (w >= twidth && w <= width && h >= theight && h <= height) { + width = w; + height = h; + best = i; + } + } + } + } + + caps = gst_caps_copy_nth (icaps, best); + gst_caps_unref (icaps); + } else { + caps = icaps; + } + } + gst_caps_unref (thiscaps); + gst_caps_unref (peercaps); + } else { + /* no peer or peer have ANY caps, work with our own caps then */ + caps = thiscaps; + } + if (caps) { + caps = gst_caps_make_writable (caps); + gst_caps_truncate (caps); + + /* now fixate */ + if (!gst_caps_is_empty (caps)) { + gst_pad_fixate_caps (GST_BASE_SRC_PAD (basesrc), caps); + 
GST_DEBUG_OBJECT (basesrc, "fixated to: %" GST_PTR_FORMAT, caps); + + if (gst_caps_is_any (caps)) { + /* hmm, still anything, so element can do anything and + * nego is not needed */ + result = TRUE; + } else if (gst_caps_is_fixed (caps)) { + /* yay, fixed caps, use those then */ + result = gst_pad_set_caps (GST_BASE_SRC_PAD (basesrc), caps); + GST_DEBUG_OBJECT (basesrc, "Set caps returned: %d", result); + } + } + gst_caps_unref (caps); + } + return result; + +no_nego_needed: + { + GST_DEBUG_OBJECT (basesrc, "no negotiation needed"); + if (thiscaps) + gst_caps_unref (thiscaps); + return TRUE; + } +} + + +/* + */ +static GstCaps * +gst_camerasrc_get_caps (GstBaseSrc * src) +{ + GstCameraSrc *camerasrc; + GstCameraSrcClass *bclass; + + camerasrc = GST_CAMERA_SRC_CAST (src); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + if (!bclass->is_open (camerasrc) || !bclass->get_caps) { + return gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_SRC_PAD + (camerasrc))); + } + + return bclass->get_caps (camerasrc); +} + + +/** + * gst_camerasrc_get_caps_info: + * @caps: given input caps + * @four_cc: location for the fourcc + * @w/@h: location for width and height + * @fps_n/@fps_d: location for framerate + * + * Collect data for the given caps. + */ +static gboolean +gst_camerasrc_get_caps_info (GstCameraSrc * camerasrc, GstCaps * caps, + guint32 * four_cc, guint * w, guint * h, guint * fps_n, guint * fps_d, + guint * size) +{ + GstCameraSrcClass *bclass; + GstStructure *structure; + const GValue *framerate; + const gchar *mimetype; + guint32 fourcc; + guint outsize; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + /* default unknown values */ + fourcc = 0; + outsize = 0; + + structure = gst_caps_get_structure (caps, 0); + + mimetype = gst_structure_get_name (structure); + + if (!gst_structure_get_int (structure, "width", (gint *) w)) + return FALSE; + + if (!gst_structure_get_int (structure, "height", (gint *) h)) + return FALSE; + + framerate = gst_structure_get_value (structure, "framerate"); + if (!framerate) + return FALSE; + + *fps_n = gst_value_get_fraction_numerator (framerate); + *fps_d = gst_value_get_fraction_denominator (framerate); + + if (!strcmp (mimetype, "video/x-vaapi-sharing")) { + fourcc = V4L2_PIX_FMT_NV12; + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += (GST_ROUND_UP_4 (*w) * *h) / 2; + } else if (!strcmp (mimetype, "video/x-raw-yuv")) { + gst_structure_get_fourcc (structure, "format", &fourcc); + + switch (fourcc) { + case GST_MAKE_FOURCC ('I', '4', '2', '0'): + case GST_MAKE_FOURCC ('I', 'Y', 'U', 'V'): + fourcc = V4L2_PIX_FMT_YUV420; + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2)); + break; + case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'): + fourcc = V4L2_PIX_FMT_YUYV; + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case GST_MAKE_FOURCC ('Y', '4', '1', 'P'): + fourcc = V4L2_PIX_FMT_Y41P; + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'): + fourcc = V4L2_PIX_FMT_UYVY; + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case GST_MAKE_FOURCC ('Y', 'V', '1', '2'): + fourcc = V4L2_PIX_FMT_YVU420; + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2)); + break; + case GST_MAKE_FOURCC ('Y', '4', '1', 'B'): + fourcc = V4L2_PIX_FMT_YUV411P; + outsize = GST_ROUND_UP_4 (*w) * *h; + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 4) * *h); + break; + case 
GST_MAKE_FOURCC ('Y', '4', '2', 'B'): + fourcc = V4L2_PIX_FMT_YUV422P; + outsize = GST_ROUND_UP_4 (*w) * *h; + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * *h); + break; + case GST_MAKE_FOURCC ('N', 'V', '1', '2'): + fourcc = V4L2_PIX_FMT_NV12; + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += (GST_ROUND_UP_4 (*w) * *h) / 2; + break; + case GST_MAKE_FOURCC ('N', 'V', '2', '1'): + fourcc = V4L2_PIX_FMT_NV21; + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += (GST_ROUND_UP_4 (*w) * *h) / 2; + break; +#ifdef V4L2_PIX_FMT_YVYU + case GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'): + fourcc = V4L2_PIX_FMT_YVYU; + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; +#endif + } + } else if (!strcmp (mimetype, "video/x-raw-rgb")) { + gint depth, endianness, r_mask; + + gst_structure_get_int (structure, "depth", &depth); + gst_structure_get_int (structure, "endianness", &endianness); + gst_structure_get_int (structure, "red_mask", &r_mask); + + switch (depth) { + case 8: + fourcc = V4L2_PIX_FMT_RGB332; + break; + case 15: + fourcc = (endianness == G_LITTLE_ENDIAN) ? + V4L2_PIX_FMT_RGB555 : V4L2_PIX_FMT_RGB555X; + break; + case 16: + fourcc = (endianness == G_LITTLE_ENDIAN) ? + V4L2_PIX_FMT_RGB565 : V4L2_PIX_FMT_RGB565X; + break; + case 24: + fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR24 : V4L2_PIX_FMT_RGB24; + break; + case 32: + fourcc = (r_mask == 0xFF) ? V4L2_PIX_FMT_BGR32 : V4L2_PIX_FMT_RGB32; + break; + } + } else if (strcmp (mimetype, "video/x-raw-bayer") == 0) { + gst_structure_get_fourcc (structure, "format", &fourcc); + switch (fourcc) { + case GST_MAKE_FOURCC ('B', 'A', '8', '1'): + fourcc = V4L2_PIX_FMT_SBGGR8; + break; + case GST_MAKE_FOURCC ('G', 'B', 'R', 'G'): + fourcc = V4L2_PIX_FMT_SGBRG8; + break; + case GST_MAKE_FOURCC ('G', 'R', 'B', 'G'): + fourcc = V4L2_PIX_FMT_SGRBG8; + break; + case GST_MAKE_FOURCC ('R', 'G', 'G', 'B'): + fourcc = V4L2_PIX_FMT_SRGGB8; + break; + case GST_MAKE_FOURCC ('B', 'G', '1', '0'): + fourcc = V4L2_PIX_FMT_SBGGR10; + break; + case GST_MAKE_FOURCC ('G', 'B', '1', '0'): + fourcc = V4L2_PIX_FMT_SGBRG10; + break; + case GST_MAKE_FOURCC ('B', 'A', '1', '0'): + fourcc = V4L2_PIX_FMT_SGRBG10; + break; + case GST_MAKE_FOURCC ('R', 'G', '1', '0'): + fourcc = V4L2_PIX_FMT_SRGGB10; + break; + case GST_MAKE_FOURCC ('B', 'Y', 'R', '2'): + fourcc = V4L2_PIX_FMT_SBGGR16; + break; + } + } else if (strcmp (mimetype, "video/x-raw-gray") == 0) { + fourcc = V4L2_PIX_FMT_GREY; + } + + if (fourcc == 0) + return FALSE; + + *four_cc = fourcc; + + return TRUE; +} + + +/** + * gst_camerasrc_get_caps_from_info: + * @camsrc: #GstCameraSrc object + * @fourcc: fourcc code + * @width: width to be set + * @height: height to be set + * @fps_n: FPS numerator to be set or 0 + * @fps_d: FPS denominator to be set or 0 + * + * Converts given parameters into GstCaps structure. + * + * Returns: GstCaps representing the given values. 
+ */ +GstCaps * +gst_camerasrc_get_caps_from_info (GstCameraSrc * camsrc, guint32 fourcc, + guint width, guint height, guint fps_n, guint fps_d) +{ + GstCaps *newcaps; + GstStructure *s; + + s = gst_structure_new ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, fourcc, + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, NULL); + + if (fps_n != 0 && fps_d != 0) { + gst_structure_set (s, "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL); + } + + newcaps = gst_caps_new_empty (); + gst_caps_append_structure (newcaps, s); + + return newcaps; +} + + +/* + */ +static gboolean +gst_camerasrc_configure_device (GstCameraSrc * camerasrc, guint * w, guint * h, + guint32 * fourcc, guint * fps_n, guint * fps_d, GstCaps * buffer_caps) +{ + GstCameraSrcClass *bclass; + GstOperationMode opmode; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + /* Stop the capturing */ + if (!bclass->stop (camerasrc)) + return FALSE; + + GST_DEBUG_OBJECT (camerasrc, "trying to set_capture %dx%d at %d/%d fps", + *w, *h, *fps_n, *fps_d); + + if (bclass->set_vfinder_mode) { + bclass->set_vfinder_mode (camerasrc, camerasrc->viewfinder_mode); + } + if (bclass->set_capture_mode) { + bclass->set_capture_mode (camerasrc, camerasrc->capture_mode); + } + + opmode = camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE ? + GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE : + GST_PHOTOGRAPHY_OPERATION_MODE_VIEWFINDER; + + if (!bclass->set_capture (camerasrc, opmode, FALSE, fourcc, w, h, + fps_n, fps_d)) { + GST_ELEMENT_ERROR (camerasrc, RESOURCE, SETTINGS, + ("Failed configuring device for capturing"), (NULL)); + return FALSE; + } + + gst_camerasrc_update_max_zoom (camerasrc); + + /* Only start the driver when not in HQ capture mode, since in HQ mode */ + /* we have already called start_capture() above */ + + if (!bclass->start (camerasrc, buffer_caps)) + return FALSE; + + if (camerasrc->photo_capture_phase != GST_CAMERA_CAPTURE) { + + if (!bclass->write_settings (camerasrc, &camerasrc->photoconf, FALSE)) { + GST_ELEMENT_ERROR (camerasrc, RESOURCE, SETTINGS, + ("Failed to configure driver module"), (NULL)); + + return FALSE; + } + } + + return TRUE; +} + + +/* + */ +static gboolean +gst_camerasrc_init_from_caps (GstCameraSrc * camerasrc, GstCaps * caps) +{ + guint w = 0, h = 0; + guint32 fourcc; + guint fps_n, fps_d; + guint size; + gboolean ret; + + /* we want our own type of fourcc codes */ + if (!gst_camerasrc_get_caps_info (camerasrc, caps, &fourcc, &w, &h, &fps_n, + &fps_d, &size)) { + GST_DEBUG_OBJECT (camerasrc, + "can't get capture format from caps %" GST_PTR_FORMAT, caps); + return FALSE; + } + + ret = gst_camerasrc_configure_device (camerasrc, &w, &h, &fourcc, + &fps_n, &fps_d, caps); + + if (ret) { + camerasrc->current_w = w; + camerasrc->current_h = h; + camerasrc->fps_n = fps_n; + camerasrc->fps_d = fps_d; + camerasrc->current_fourcc = fourcc; + + camerasrc->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n); + GST_DEBUG_OBJECT (camerasrc, "Buffer duration: %" GST_TIME_FORMAT, + GST_TIME_ARGS (camerasrc->duration)); + +// camerasrc->frame_byte_size = size; + } + + return ret; +} + +/* + */ +static gboolean +gst_camerasrc_set_caps (GstBaseSrc * src, GstCaps * caps) +{ + GstCameraSrc *camerasrc; + GstCameraSrcClass *bclass; + + camerasrc = GST_CAMERA_SRC_CAST (src); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "set_caps called: %" GST_PTR_FORMAT, caps); + + /* if we're not open, punt -- we'll get setcaps'd later via negotiate */ + if (!bclass->is_open (camerasrc)) + 
return FALSE; + + return gst_camerasrc_init_from_caps (camerasrc, caps); +} + +/* + */ +static gboolean +gst_camerasrc_query (GstBaseSrc * bsrc, GstQuery * query) +{ + GstCameraSrc *src; + GstCameraSrcClass *bclass; + gboolean res = FALSE; + guint num_buffers; + + src = GST_CAMERA_SRC_CAST (bsrc); + bclass = GST_CAMERA_SRC_GET_CLASS (src); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_LATENCY:{ + GstClockTime max_latency; + + /* device must be open */ + if (!bclass->is_open (src)) { + GST_WARNING_OBJECT (src, + "Can't give latency since device isn't open !"); + goto done; + } + + /* we must have a framerate */ + if (src->fps_n <= 0 || src->fps_d <= 0) { + GST_WARNING_OBJECT (src, + "Can't give latency since framerate isn't fixated !"); + goto done; + } + + num_buffers = bclass->get_num_buffers (src); + /* max latency is total duration of the frame buffer */ + max_latency = num_buffers * src->duration; + + GST_DEBUG_OBJECT (bsrc, + "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT, + GST_TIME_ARGS (src->duration), GST_TIME_ARGS (max_latency)); + + /* we are always live, the min latency is 1 frame and the max latency is + * the complete buffer of frames. */ + gst_query_set_latency (query, TRUE, src->duration, max_latency); + + res = TRUE; + break; + } + default: + res = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query); + break; + } + +done: + + return res; +} + +static gboolean +gst_camerasrc_start (GstBaseSrc * src) +{ +// GstCameraSrc *camerasrc = GST_CAMERA_SRC_CAST (src); + + return TRUE; +} + +/* + */ +static gboolean +gst_camerasrc_stop (GstBaseSrc * src) +{ + GstCameraSrc *camerasrc; + GstCameraSrcClass *bclass; + + camerasrc = GST_CAMERA_SRC_CAST (src); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "Stopping"); + + bclass->stop (camerasrc); + + camerasrc->photo_capture_phase = GST_CAMERA_VIEWFINDER; + + return TRUE; +} + +static gboolean +gst_camerasrc_unlock (GstBaseSrc * src) +{ + GstCameraSrcClass *pclass; + GstCameraSrc *camerasrc; + gboolean ret = TRUE; + + camerasrc = GST_CAMERA_SRC_CAST (src); + pclass = GST_CAMERA_SRC_GET_CLASS (src); + + GST_DEBUG_OBJECT (camerasrc, "Unlock"); + + if (pclass->unlock) + ret = pclass->unlock (camerasrc); + + return ret; +} + +static gboolean +gst_camerasrc_unlock_stop (GstBaseSrc * src) +{ + GstCameraSrcClass *pclass; + GstCameraSrc *camerasrc; + gboolean ret = TRUE; + + camerasrc = GST_CAMERA_SRC_CAST (src); + pclass = GST_CAMERA_SRC_GET_CLASS (src); + + GST_DEBUG_OBJECT (camerasrc, "Unlock stop"); + + if (pclass->unlock_stop) + ret = pclass->unlock_stop (camerasrc); + + return ret; +} + + +/* + * gst_camerasrc_send_image_tags: + * @camerasrc: #GstCameraSrc object. + * + */ +static gboolean +gst_camerasrc_send_image_tags (GstCameraSrc * camerasrc) +{ + GstTagMergeMode mode; + GstCameraSrcClass *bclass; + guint iso, wbalance; + GstEvent *tagevent; + GstTagList *tlist; + gfloat zoom; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + tlist = gst_tag_list_new (); + zoom = camerasrc->photoconf.zoom; + iso = camerasrc->photoconf.iso_speed; + wbalance = + (camerasrc->photoconf.wb_mode == GST_PHOTOGRAPHY_WB_MODE_AUTO) ? 
0 : 1; + + gst_tag_register_musicbrainz_tags(); + + mode = GST_TAG_MERGE_REPLACE; + gst_tag_list_add(tlist, mode, GST_TAG_CAPTURING_DIGITAL_ZOOM_RATIO, (gdouble)zoom, NULL); + gst_tag_list_add(tlist, mode, GST_TAG_CAPTURING_ISO_SPEED, iso, NULL); + + if(camerasrc->maker_note) + { + int handle; + gboolean ret; + GstBuffer *pmakerbuf = NULL; + unsigned buf_size; + unsigned num_afwindows, num_awbgrid; + + num_afwindows = 1; + num_awbgrid = 1; + ret = bclass->makernote_init(camerasrc, &buf_size, num_afwindows, 0, 0, num_awbgrid, &handle); + if(TRUE == ret) + { + pmakerbuf = gst_buffer_new_and_alloc(buf_size); + + ret = bclass->makernote_deal(camerasrc, pmakerbuf, num_afwindows, num_awbgrid, handle); + if(TRUE == ret) + { + gst_tag_list_add(tlist, mode, GST_TAG_APPLICATION_DATA, pmakerbuf, NULL); + } + + bclass->makernote_uninit(camerasrc, handle); + gst_buffer_unref(pmakerbuf); + } + } + + if (bclass->fill_image_tags) + bclass->fill_image_tags (camerasrc, tlist); + + tagevent = gst_event_new_tag (gst_tag_list_copy (tlist)); + gst_pad_push_event (GST_BASE_SRC_PAD (camerasrc), tagevent); + GST_DEBUG_OBJECT (camerasrc, "image tags sent: %" GST_PTR_FORMAT, tlist); + gst_tag_list_free (tlist); + + return TRUE; +} + + +/* + * gst_camerasrc_send_preview: + * @camsrc: #GstCameraSrc object + * + * Sends HQ image preview image (snapshot) as a GstMessage. + * + * Returns: TRUE on success. + */ +static gboolean +gst_camerasrc_send_preview (GstCameraSrc * camsrc) +{ + GstCameraSrcClass *bclass; + GstBuffer *buf = NULL; + GstStructure *msg_s = NULL; + GstCaps *prvcaps = NULL; + GstMessage *msg = NULL; + gboolean ret = FALSE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camsrc); + + if (bclass->get_preview_image) { + ret = bclass->get_preview_image (camsrc, &buf); + } + + if (ret) { + GST_DEBUG_OBJECT (camsrc, "Sending preview image"); + prvcaps = gst_caps_new_simple ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, camsrc->preview_fourcc, + "width", G_TYPE_INT, camsrc->preview_w, + "height", G_TYPE_INT, camsrc->preview_h, + "framerate", GST_TYPE_FRACTION, 1, 1, NULL); + + gst_buffer_set_caps (buf, prvcaps); + + /* Send custom preview image GstMessage */ + msg_s = gst_structure_new (GST_CAMERA_SRC_PREVIEW_IMAGE, "buffer", + GST_TYPE_BUFFER, buf, NULL); + + msg = gst_message_new_element (GST_OBJECT (camsrc), msg_s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + gst_caps_unref (prvcaps); + } else { + GST_DEBUG_OBJECT (camsrc, "Retrieving preview image failed"); + } + + /* if we still have valid settings for preview, reserve a new buffer */ + if (camsrc->preview_resolution_set) { + GST_DEBUG_OBJECT (camsrc, "Reserving a new preview buffer"); + bclass->set_capture (camsrc, GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW, FALSE, + &camsrc->preview_fourcc, &camsrc->preview_w, &camsrc->preview_h, + NULL, NULL); + } + + if (buf) { + gst_buffer_unref (buf); + } + + return ret; +} + + +/* + * gst_camerasrc_set_capture_caps: + * @camerasrc: #GstCameraSrc object. + * + * Set the capture caps on element's src pad. + * + * Returns: TRUE on success. 
+ */ +static gboolean +gst_camerasrc_set_capture_caps (GstCameraSrc * camerasrc) +{ + GstCameraSrcClass *bclass; + GstCaps *newcaps = NULL; + gboolean ret = TRUE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_DEBUG_OBJECT (camerasrc, "Calling set_capture()"); + + /* Try what resolution the subclass can capture */ + ret = bclass->set_capture (camerasrc, + GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE, TRUE, + &camerasrc->capture_fourcc, &camerasrc->capture_w, &camerasrc->capture_h, + &camerasrc->capture_fps_n, &camerasrc->capture_fps_d); + + if (!ret) { + goto done; + } + + /* FIXME: FPS definition should be removed from capture caps */ + newcaps = gst_camerasrc_get_caps_from_info (camerasrc, + camerasrc->capture_fourcc, camerasrc->capture_w, camerasrc->capture_h, + camerasrc->capture_fps_n, camerasrc->capture_fps_d); + + GST_DEBUG_OBJECT (camerasrc, "Set capture caps: %" GST_PTR_FORMAT, newcaps); + + /* Notify application that we are ready now. This must be called before */ + /* the set_caps() call below, since application needs to know the new caps */ + /* so that it can adjust the caps filter to accept new format before */ + /* srcpad caps are actually changed */ + gst_camerasrc_photo_ready_for_capture (camerasrc, newcaps); + + camerasrc->vf_caps = + gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (camerasrc)); + + /* This causes caps nego and switches resolution to hi-res mode */ + /* FIXME: Do we even need to set this? Application has already set the + * capsfilter */ + if (!gst_caps_is_equal (camerasrc->vf_caps, newcaps)) { + GST_DEBUG_OBJECT (camerasrc, "Setting image capture caps"); + ret = gst_pad_set_caps (GST_BASE_SRC_PAD (camerasrc), newcaps); + GST_DEBUG_OBJECT (camerasrc, "Setting image capture caps FINISHED"); + } else { + GST_DEBUG_OBJECT (camerasrc, "Forcing the re-initialization"); + ret = gst_camerasrc_configure_device (camerasrc, &camerasrc->current_w, + &camerasrc->current_h, &camerasrc->current_fourcc, &camerasrc->fps_n, + &camerasrc->fps_d, newcaps); + gst_caps_unref (camerasrc->vf_caps); + camerasrc->vf_caps = NULL; + } + + gst_caps_unref (newcaps); + +done: + + return ret; +} + + +/* + * + */ +static void +gst_camerasrc_apply_timestamp (GstCameraSrc * camerasrc, GstBuffer * buf) +{ + GstClock *clock; + GstClockTime timestamp; + + GST_OBJECT_LOCK (camerasrc); + if ((clock = GST_ELEMENT_CLOCK (camerasrc))) { + /* we have a clock, get base time and ref clock */ + timestamp = GST_ELEMENT (camerasrc)->base_time; + gst_object_ref (clock); + } else { + /* no clock, can't set timestamps */ + timestamp = GST_CLOCK_TIME_NONE; + } + GST_OBJECT_UNLOCK (camerasrc); + + if (clock) { + /* the time now is the time of the clock minus the base time */ + timestamp = gst_clock_get_time (clock) - timestamp; + gst_object_unref (clock); + + if (timestamp > camerasrc->duration) + timestamp -= camerasrc->duration; + else + timestamp = 0; + } + + /* we could also use the timestamp from the buffer itself */ + GST_BUFFER_TIMESTAMP (buf) = timestamp; + GST_BUFFER_DURATION (buf) = camerasrc->duration; +} + + +/* + */ +static GstFlowReturn +gst_camerasrc_create (GstPushSrc * src, GstBuffer ** buf) +{ + GstCameraSrcClass *bclass; + GstCameraSrc *camerasrc; + GstBaseSrcClass *basesrc_class; + + GstBuffer *buf_cap_signal1 = NULL; /*output main buffer for capture signal*/ + + gboolean still_capture_initialised = FALSE; + GstFlowReturn ret = GST_FLOW_OK; + GstBuffer *temp; + + camerasrc = GST_CAMERA_SRC_CAST (src); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + basesrc_class = 
GST_BASE_SRC_CLASS (bclass); + + if (camerasrc->req_negotiation) { + GST_DEBUG_OBJECT(camerasrc, "negotiation start"); + basesrc_class->negotiate(GST_BASE_SRC(camerasrc)); + camerasrc->req_negotiation = FALSE; + g_signal_emit(G_OBJECT(camerasrc), gst_camerasrc_signals[SIGNAL_NEGO_COMPLETE], (GQuark)NULL); + GST_DEBUG_OBJECT (camerasrc, "negotiation stop"); + } + + +start_over: + + g_mutex_lock (camerasrc->state_lock); + + if (camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE_START) { + /* Tell subclass to stop flushing buffers */ + if (bclass->unlock_stop) { + GST_DEBUG_OBJECT (camerasrc, "Stop flushing, capture is starting"); + bclass->unlock_stop (camerasrc); + } + + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE; + + GST_DEBUG_OBJECT (camerasrc, + "Flushing old buffers before starting HQ capture"); + gst_pad_push_event (GST_BASE_SRC_PAD (camerasrc), + gst_event_new_flush_start ()); + gst_pad_push_event (GST_BASE_SRC_PAD (camerasrc), + gst_event_new_flush_stop ()); + + if (!gst_camerasrc_set_capture_caps (camerasrc)) { + goto hq_capture_failed; + } + still_capture_initialised = TRUE; + } + + else if (camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE_DONE) { + + gboolean ret; + + camerasrc->photo_capture_phase = GST_CAMERA_VIEWFINDER; + camerasrc->requested_af_mode = AF_OFF_REQUESTED; + camerasrc->capture_mode = GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER; + GST_DEBUG_OBJECT (camerasrc, "capture done. switching to viewfinder"); + + /* Set the normal viewfinder resolution back */ + if (camerasrc->vf_caps) { + GST_DEBUG_OBJECT (camerasrc, "set VF caps"); + ret = gst_pad_set_caps (GST_BASE_SRC_PAD (camerasrc), camerasrc->vf_caps); + gst_caps_unref (camerasrc->vf_caps); + camerasrc->vf_caps = NULL; + } else { + GstCaps *tmp; + + tmp = gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (camerasrc)); + + /* Reconfigure the device to run viewfinder again */ + ret = gst_camerasrc_configure_device (camerasrc, &camerasrc->current_w, + &camerasrc->current_h, &camerasrc->current_fourcc, &camerasrc->fps_n, + &camerasrc->fps_d, tmp); + + gst_caps_unref (tmp); + } + + if (!ret) { + GST_WARNING_OBJECT (camerasrc, "Reinitializing viewfinder failed"); + g_mutex_unlock (camerasrc->state_lock); + return GST_FLOW_ERROR; + } + + GST_DEBUG_OBJECT (camerasrc, "viewfinder running"); + } + + g_mutex_lock (camerasrc->af_lock); + + /* Handle AF requests only in VIEWFINDER and AUTOFOCUS states */ + if ((camerasrc->photo_capture_phase == GST_CAMERA_VIEWFINDER || + camerasrc->photo_capture_phase == GST_CAMERA_AUTOFOCUS || + camerasrc->photo_capture_phase == GST_CAMERA_AUTOFOCUS_DONE) && + camerasrc->requested_af_mode != AF_NONE_REQUESTED) { + if (camerasrc->requested_af_mode == AF_ON_REQUESTED) { + gboolean ret; + + /* In still capture mode AE will be locked during AF operation */ + if (camerasrc->viewfinder_mode == GST_CAMERA_SRC_VIEWFINDER_MODE_STILL) { + bclass->set_autoexposure (camerasrc, FALSE); + } + ret = bclass->set_autofocus (camerasrc, TRUE); + + if (ret) { + camerasrc->photo_capture_phase = GST_CAMERA_AUTOFOCUS; + } else { + /* Starting AF failed, so start AE again */ + bclass->set_autoexposure (camerasrc, TRUE); + } + } else { + bclass->set_autofocus (camerasrc, FALSE); + bclass->set_autoexposure (camerasrc, TRUE); + camerasrc->photo_capture_phase = GST_CAMERA_VIEWFINDER; + } + + camerasrc->requested_af_mode = AF_NONE_REQUESTED; + } + + g_mutex_unlock (camerasrc->af_lock); + g_mutex_unlock (camerasrc->state_lock); + + ret = bclass->grab_frame (camerasrc, &temp, camerasrc->photo_capture_phase); + + 
g_mutex_lock (camerasrc->state_lock); + + if (ret != GST_FLOW_OK) { + /* _prepare_for_capture() may have interrupted frame grabbing. */ + if (ret == GST_FLOW_WRONG_STATE && + camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE_START) { + g_mutex_unlock (camerasrc->state_lock); + ret = GST_FLOW_OK; + goto start_over; + } else { + g_mutex_unlock (camerasrc->state_lock); + goto leave; + } + } + + *buf = temp; + + /* Post-capture phase */ + + if (camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE) { + GstCaps *src_caps; + + src_caps = gst_pad_get_negotiated_caps (GST_BASE_SRC_PAD (camerasrc)); + gst_buffer_set_caps (*buf, src_caps); + gst_caps_unref (src_caps); + + /* Restore the original number of buffers after capture is done */ + /* FIXME: Commented out */ +// camerasrc->num_buffers = tmp_num_buffers; + +// gst_camerasrc_send_image_tags (camerasrc); + + gst_camerasrc_send_preview (camerasrc); + + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE_DONE; + } + + if(GST_CAMERA_SRC_CAPTURE_MODE_STILL == camerasrc->capture_mode) { + gst_camerasrc_send_image_tags (camerasrc); + if ((camerasrc->signal_still_capture == TRUE) && (camerasrc->photo_capture_phase == GST_CAMERA_CAPTURE_DONE) && (still_capture_initialised == TRUE)) { + still_capture_initialised = FALSE; + /*call signal*/ + /* alloc buffer for capture callback */ + buf_cap_signal1 = gst_buffer_new (); + + GST_BUFFER_DATA(buf_cap_signal1) = GST_BUFFER_DATA(*buf); + GST_BUFFER_SIZE(buf_cap_signal1) = GST_BUFFER_SIZE(*buf); + GST_BUFFER_CAPS(buf_cap_signal1) = gst_caps_new_simple("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, camerasrc->capture_fourcc, + "width", G_TYPE_INT, camerasrc->capture_w, + "height", G_TYPE_INT,camerasrc->capture_h, + NULL); + + GST_LOG_OBJECT (camerasrc, "CALL: capture callback"); + g_mutex_unlock (camerasrc->state_lock); + g_signal_emit( G_OBJECT (camerasrc), + gst_camerasrc_signals[SIGNAL_STILL_CAPTURE], + 0, + buf_cap_signal1, + NULL, + NULL ); + GST_LOG_OBJECT (camerasrc, "RETURN: capture callback"); + g_mutex_lock (camerasrc->state_lock); + + + if(--camerasrc->capture_counter > 0) { + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE_START; + GST_DEBUG_OBJECT (camerasrc, "do some more captures count %d",camerasrc->capture_counter); + } + else + camerasrc->photo_capture_phase = GST_CAMERA_CAPTURE_DONE; + + g_mutex_unlock (camerasrc->state_lock); + goto start_over; + } + } + + if (GST_BUFFER_TIMESTAMP (*buf) == GST_CLOCK_TIME_NONE) { + gst_camerasrc_apply_timestamp (camerasrc, *buf); + } + +done: + + g_mutex_unlock (camerasrc->state_lock); + +leave: + return ret; + + /* ERRORS */ +hq_capture_failed: + GST_ELEMENT_ERROR (camerasrc, RESOURCE, READ, + ("Error during HQ capture"), (NULL)); + ret = GST_FLOW_ERROR; + goto done; +} + + +static GstStateChangeReturn +gst_camerasrc_change_state (GstElement * element, GstStateChange transition) +{ + GstCameraSrcClass *bclass; + GstCameraSrc *camerasrc; + GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; + + camerasrc = GST_CAMERA_SRC_CAST (element); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + + GST_LOG_OBJECT (camerasrc, "State change: %s -> %s", + gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)), + gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition))); + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + /* Open the device */ + if (!bclass->open (camerasrc)) + return GST_STATE_CHANGE_FAILURE; + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + 
+ switch (transition) { + case GST_STATE_CHANGE_READY_TO_NULL: + if (bclass->is_open (camerasrc)) { + bclass->close (camerasrc); + } + camerasrc->capture_w = camerasrc->preview_w = camerasrc->current_w = 0; + camerasrc->capture_h = camerasrc->preview_h = camerasrc->current_h = 0; + camerasrc->capture_fourcc = camerasrc->preview_fourcc = 0; + camerasrc->current_fourcc = 0; + camerasrc->fps_d = camerasrc->fps_n = 0; + camerasrc->capture_resolution_set = FALSE; + camerasrc->preview_resolution_set = FALSE; + + /* Notify that preview caps have changed (to NULL) */ + g_object_notify (G_OBJECT (camerasrc), + GST_PHOTOGRAPHY_PROP_IMAGE_PREVIEW_SUPPORTED_CAPS); + + break; + default: + break; + } + + return ret; +} + + +/** + * gst_camerasrc_add_color_channel: + * @camsrc: #GstCameraSrc object. + * @channel: #GstColorBalanceChannel object. + * + * Add a new color channel to list. + */ +void +gst_camerasrc_add_color_channel (GstCameraSrc * camsrc, + GstColorBalanceChannel * channel) +{ + camsrc->colors = g_list_append (camsrc->colors, (gpointer) channel); +} + + +/** + * gst_camerasrc_clear_color_channels: + * @camsrc: #GstCameraSrc object. + * + * Delete all color channels. + */ +void +gst_camerasrc_clear_color_channels (GstCameraSrc * camsrc) +{ + g_list_foreach (camsrc->colors, (GFunc) g_object_unref, NULL); + g_list_free (camsrc->colors); + camsrc->colors = NULL; +} + +/** + * gst_camerasrc_add_cameracontrol_channel: + * @camsrc: #GstCameraSrc object. + * @channel: #GstColorBalanceChannel object. + * + * Add a new cameracontrol channel to list. + */ +void +gst_camerasrc_add_cameracontrol_channel (GstCameraSrc * camsrc, + GstCameraControlChannel * channel) +{ + camsrc->camera_controls = g_list_append (camsrc->camera_controls, (gpointer) channel); +} + +/** + * gst_camerasrc_clear_cameracontrol_channels: + * @camsrc: #GstCameraSrc object. + * + * Delete all cameracontrol channels. + */ + +void +gst_camerasrc_clear_cameracontrol_channels (GstCameraSrc * camsrc) +{ + g_list_foreach (camsrc->camera_controls, (GFunc) g_object_unref, NULL); + g_list_free (camsrc->camera_controls); + camsrc->camera_controls = NULL; +} + +/** + * gst_camerasrc_send_capture_start_message: + * @camsrc: GstCameraSrc object + * + * Sends a GstMessage notification to GstBus that capture operation is + * about to start. + */ +void +gst_camerasrc_send_capture_start_message (GstCameraSrc * camsrc) +{ + GstStructure *s; + GstMessage *msg; + + GST_DEBUG_OBJECT (camsrc, "Sending capture-start message"); + + /* Send custom GstMessage "photo-capture-start" */ + s = gst_structure_new (GST_CAMERA_SRC_CAPTURE_START, NULL); + msg = gst_message_new_element (GST_OBJECT (camsrc), s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + GST_LOG_OBJECT (camsrc, "Capture-start message sent"); +} + + +/** + * gst_camerasrc_send_capture_stop_message: + * @camsrc: GstCameraSrc object + * + * Sends a GstMessage notification to GstBus that capture operation has + * just finished. 
+ */ +void +gst_camerasrc_send_capture_stop_message (GstCameraSrc * camsrc) +{ + GstStructure *s; + GstMessage *msg; + + GST_DEBUG_OBJECT (camsrc, "Sending capture-stop message"); + + /* Send custom GstMessage "photo-capture-end" */ + s = gst_structure_new (GST_CAMERA_SRC_CAPTURE_END, NULL); + msg = gst_message_new_element (GST_OBJECT (camsrc), s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + GST_LOG_OBJECT (camsrc, "Capture-stop message sent"); +} + + +/** + * gst_camerasrc_af_update: + * @camsrc: #GstCameraSrc object. + * @fs: #GstCameraFocusStatus structure. Owned by caller. + * + * Tell GstCameraSrc that previously started autofocus operation has finished. + */ +void +gst_camerasrc_af_update (GstCameraSrc * camsrc, GstCameraFocusStatus * fs) +{ + GstCameraSrcClass *bclass; + + bclass = GST_CAMERA_SRC_GET_CLASS (camsrc); + + g_mutex_lock (camsrc->state_lock); + g_mutex_lock (camsrc->af_lock); + + if (fs->status == GST_PHOTOGRAPHY_FOCUS_STATUS_SUCCESS || + fs->status == GST_PHOTOGRAPHY_FOCUS_STATUS_FAIL) { + GstStructure *s; + GstMessage *msg; + + GST_DEBUG_OBJECT (camsrc, "autofocusing ended"); + + /* Send custom GstMessage "autofocus-done" */ + s = gst_structure_new (GST_PHOTOGRAPHY_AUTOFOCUS_DONE, + "status", G_TYPE_INT, fs->status, NULL); + + /* If autofocus succeeded, send the bitmask that defines focused + * windows too */ + if (fs->status == GST_PHOTOGRAPHY_FOCUS_STATUS_SUCCESS) { + GArray *windows; + guint i; + + windows = g_array_new (FALSE, FALSE, sizeof (gint)); + for (i = 0; i < fs->num_windows; i++) { + GST_DEBUG_OBJECT (camsrc, "focus window: %d", fs->windows[i]); + g_array_append_val (windows, fs->windows[i]); + } + + gst_structure_set (s, + //"focus-windows", G_TYPE_ARRAY, windows, + "focus-window-rows", G_TYPE_INT, fs->focus_rows, + "focus-window-columns", G_TYPE_INT, fs->focus_columns, + "focus-window-coverage", G_TYPE_INT, fs->coverage, NULL); + + GST_DEBUG_OBJECT (camsrc, "focus rows: %d", fs->focus_rows); + GST_DEBUG_OBJECT (camsrc, "focus columns: %d", fs->focus_columns); + GST_DEBUG_OBJECT (camsrc, "focus coverage: %d", fs->coverage); + } + + msg = gst_message_new_element (GST_OBJECT (camsrc), s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + /* In still capture mode we don't turn off AF algorithm yet, since it */ + /* would enable CAF. Instead, it is turned off when application */ + /* explicitly calls set_autofocus (FALSE), which in turn raises */ + /* af_requested = OFF flag and AF is finally stopped. */ + + /* In video capture mode AF will be stopped immediately to enable AE */ + if (camsrc->viewfinder_mode == GST_CAMERA_SRC_VIEWFINDER_MODE_VIDEO) { + bclass->set_autofocus (camsrc, FALSE); + } + + /* We don't turn on autoexposure here either. This way AE stays */ + /* "locked" until application explicitly calls set_autofocus (FALSE). */ + + camsrc->photo_capture_phase = GST_CAMERA_AUTOFOCUS_DONE; + } + + g_mutex_unlock (camsrc->af_lock); + g_mutex_unlock (camsrc->state_lock); +} + + +/** + * gst_camerasrc_caf_update: + * @camsrc: GstCameraSrc object. + * @fs: #GstCameraFocusStatus structure. Owned by caller. + * + * Tell GstCameraSrc that continuous autofocus algorithm has changed its state. + * Sends a GstMessage notification to GstBus indicating a change in + * continuous autofocus status. 
+ */ +void +gst_camerasrc_caf_update (GstCameraSrc * camsrc, GstCameraFocusStatus * fs) +{ + GstStructure *s; + GstMessage *msg; + + GST_DEBUG_OBJECT (camsrc, "Sending CAF status: %d", fs->status); + + /* Send custom GstMessage "caf-update" */ + s = gst_structure_new (GST_CAMERA_SRC_CAF_STATUS, NULL); + gst_structure_set (s, "status", G_TYPE_INT, fs->status, NULL); + msg = gst_message_new_element (GST_OBJECT (camsrc), s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + GST_LOG_OBJECT (camsrc, "CAF update message sent"); +} + + +/** + * gst_camerasrc_shake_update: + * @camsrc: GstCameraSrc object. + * @risk: GstPhotoShakeRisk value. + * + * Tell GstCameraSrc that shake risk has changed. Sends a GstMessage + * notification to GstBus indicating a change in shake risk status. + */ +void +gst_camerasrc_shake_update (GstCameraSrc * camsrc, GstPhotoShakeRisk risk) +{ + GstStructure *s; + GstMessage *msg; + + GST_DEBUG_OBJECT (camsrc, "Sending shake risk update: %d", risk); + + /* Send custom GstMessage telling the changed shake risk level */ + s = gst_structure_new (GST_PHOTOGRAPHY_SHAKE_RISK, NULL); + gst_structure_set (s, "shake_risk", G_TYPE_INT, risk, NULL); + msg = gst_message_new_element (GST_OBJECT (camsrc), s); + + if (gst_element_post_message (GST_ELEMENT (camsrc), msg) == FALSE) { + GST_WARNING ("This element has no bus, therefore no message sent!"); + } + + GST_LOG_OBJECT (camsrc, "Shake indicator message sent"); +} + + +/* Tag helper functions */ + +/** + * gst_camerasrc_exposure_mode_from_exif_value: + * @value: exposure mode in EXIF format. + * + * Convert exposure mode to string. + * + * Returns: String representation of the scene capture type, or NULL if invalid + * value was given. Possible values: "auto-exposure", "manual-exposure" and + * "auto-bracket". + */ +const gchar * +gst_camerasrc_exposure_mode_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "auto-exposure"; + case 1: + return "manual-exposure"; + case 2: + return "auto-bracket"; + default: + GST_WARNING ("Invalid exif exposure mode: %d", value); + return NULL; + } +} + +/** + * gst_camerasrc_scene_capture_type_from_exif_value: + * @value: scene capture type in EXIF format. + * + * Convert scene capture type to string. + * + * Returns: String representation of the scene capture type, or NULL if invalid + * value was given. Possible values: "standard", "landscape", "portrait" and + * "night-scene". + */ +const gchar * +gst_camerasrc_scene_capture_type_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "standard"; + case 1: + return "landscape"; + case 2: + return "portrait"; + case 3: + return "night-scene"; + default: + GST_WARNING ("Invalid exif scene capture type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_gain_adjustment_from_exif_value: + * @value: gain adjustment type in EXIF format. + * + * Convert gain adjustment type to string. + * + * Returns: String representation of the gain adjustment type, or NULL if + * invalid value was given. Possible values: "none", "low-gain-up", + * "high-gain-up", "low-gain-down" and "high-gain-down". 
+ */ +const gchar * +gst_camerasrc_gain_adjustment_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "none"; + case 1: + return "low-gain-up"; + case 2: + return "high-gain-up"; + case 3: + return "low-gain-down"; + case 4: + return "high-gain-down"; + default: + GST_WARNING ("Invalid exif gain control type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_contrast_from_exif_value: + * @value: contrast type in EXIF format. + * + * Convert contrast type to string. + * + * Returns: String representation of the contrast type, or NULL if invalid + * value was given. Possible values: "normal", "soft" and "hard". + */ +const gchar * +gst_camerasrc_contrast_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "normal"; + case 1: + return "soft"; + case 2: + return "hard"; + default: + GST_WARNING ("Invalid contrast type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_saturation_from_exif_value: + * @value: saturation type in EXIF format. + * + * Convert saturation type to string. + * + * Returns: String representation of the saturation type, or NULL if invalid + * value was given. Possible values: "normal", "low-saturation" and + * "high-saturation". + */ +const gchar * +gst_camerasrc_saturation_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "normal"; + case 1: + return "low-saturation"; + case 2: + return "high-saturation"; + default: + GST_WARNING ("Invalid saturation type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_flash_mode_from_exif_value: + * @value: flash EXIF field + * + * Convert "flash" EXIF field into text string. + * + * Returns: String representation of the flash mode, or NULL if invalid + * value was given. Possible values: "always", "never" and "auto". + */ +const gchar * +gst_camerasrc_flash_mode_from_exif_value (gint value) +{ + // frame_info_t + + // bits 3 and 4 indicate the flash mode, + // + // Values for bits 3 and 4 indicating the camera's flash mode. + // 00b = unknown + // 01b = Compulsory flash firing + // 10b = Compulsory flash suppression + // 11b = Auto mode + // + + /* Bit numbering in EXIF spec starts from 0 */ + value >>= 3; + + switch (value & 0x3) { + case 1: + return "always"; + case 2: + return "never"; + case 3: + return "auto"; + default: + GST_WARNING ("Invalid flash mode type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_sharpness_from_exif_value: + * @value: sharpness type in EXIF format. + * + * Convert sharpness type to string. + * + * Returns: String representation of the sharpness type, or NULL if invalid + * value was given. Possible values: "normal", "soft" and "hard". + */ +const gchar * +gst_camerasrc_sharpness_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "normal"; + case 1: + return "soft"; + case 2: + return "hard"; + default: + GST_WARNING ("Invalid sharpness type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_metering_mode_from_exif_value: + * @value: metering mode type in EXIF format. + * + * Convert metering mode type to string. + * + * Returns: String representation of the metering mode type, or NULL if invalid + * value was given. Possible values: "unknown", "average", + * "center-weighted-average", "spot", "multi-spot", "pattern", "partial" and + * "other". 
+ */ +const gchar * +gst_camerasrc_metering_mode_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "unknown"; + case 1: + return "average"; + case 2: + return "center-weighted-average"; + case 3: + return "spot"; + case 4: + return "multi-spot"; + case 5: + return "pattern"; + case 6: + return "partial"; + case 255: + return "other"; + default: + GST_WARNING ("Invalid metering mode type: %d", value); + return NULL; + } +} + + +/** + * gst_camerasrc_file_source_from_exif_value: + * @value: FileSource type in EXIF format. + * + * Convert FileSource type to string. + * + * Returns: String representation of the FileSource type, or NULL if invalid + * value was given. Possible values: "dsc", "transparent-scanner", + * "reflex-scanner" and "other". + */ +const gchar * +gst_camerasrc_file_source_from_exif_value (gint value) +{ + switch (value) { + case 0: + return "other"; + case 1: + return "transparent-scanner"; + case 2: + return "reflex-scanner"; + case 3: + return "dsc"; + default: + GST_WARNING ("Invalid file source type: %d", value); + return NULL; + } +} + + +/* Default implementations for vmethods */ + +static GstPhotoCaps +gst_camerasrc_default_capabilities (GstCameraSrc * camsrc) +{ + return GST_PHOTOGRAPHY_CAPS_NONE; +} + +static gboolean +gst_camerasrc_default_ret_true_with_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf) +{ + return TRUE; +} + +static gboolean +gst_camerasrc_default_write_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf, gboolean scene_mode_override) +{ + return TRUE; +} + +static gboolean +gst_camerasrc_default_set_onoff (GstCameraSrc * camsrc, gboolean on_off) +{ + return TRUE; +} + +static GstCaps * +gst_camerasrc_default_get_caps (GstCameraSrc * camsrc, GstOperationMode mode) +{ + GST_DEBUG_OBJECT (camsrc, "Returning NULL caps for mode %d", mode); + + return NULL; +} +static gboolean +gst_camerasrc_default_read_exif (GstCameraSrc *camsrc, + GstCameraControlExifInfo *exif_info) +{ + GST_DEBUG_OBJECT (camsrc, "default read_exif implementation called, returning defaults"); + // TODO: fill with default values + return TRUE; +} + +static gboolean +gst_camerasrc_default_set_flash_mode (GstCameraSrc *camsrc, int value) +{ + return TRUE; +} + +static void +gst_camerasrc_default_functions_init (GstCameraSrcClass * camera_class) +{ + camera_class->get_capabilities = gst_camerasrc_default_capabilities; + camera_class->set_autofocus = gst_camerasrc_default_set_onoff; + camera_class->set_autoexposure = gst_camerasrc_default_set_onoff; + camera_class->read_settings = gst_camerasrc_default_ret_true_with_settings; + camera_class->write_settings = gst_camerasrc_default_write_settings; + camera_class->get_supported_caps = gst_camerasrc_default_get_caps; + camera_class->set_flash_mode = gst_camerasrc_default_set_flash_mode; + camera_class->read_exif = gst_camerasrc_default_read_exif; + + GST_DEBUG ("Default functions set"); +} + + +static gboolean +gst_camerasrc_handle_event (GstCameraSrc * camerasrc, GstEvent * event) +{ + GstCameraSrcClass *bclass; + gboolean ret = FALSE; + + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + GST_LOG_OBJECT (camerasrc, "handling %s event", GST_EVENT_TYPE_NAME (event)); + + if (bclass->event) { + ret = bclass->event (camerasrc, event); + } + + return ret; +} + + +/* + * gst_camerasrc_send_event: + * @element: GstElement object. + * @event: GstEvent to be handled. + * + * Returns: TRUE if the event was handled. 
+ */ +static gboolean +gst_camerasrc_send_event (GstElement * element, GstEvent * event) +{ + GstCameraSrc *camerasrc; + gboolean ret = FALSE; + + camerasrc = GST_CAMERA_SRC_CAST (element); + + GST_LOG_OBJECT (camerasrc, "got %s event", GST_EVENT_TYPE_NAME (event)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CUSTOM_UPSTREAM: + ret = gst_camerasrc_handle_event (camerasrc, event); + break; + default: + break; + } + + if (!ret) { + ret = GST_ELEMENT_CLASS (parent_class)->send_event (element, event); + } + + return ret; +} + + +/* + * gst_camerasrc_event: + * @src: #GstBaseSrc object. + * @event: #GetEvent object. + * + * Returns: TRUE of the event was handled. + */ +static gboolean +gst_camerasrc_event (GstBaseSrc * src, GstEvent * event) +{ + GstCameraSrc *camerasrc; + gboolean ret; + + camerasrc = GST_CAMERA_SRC_CAST (src); + ret = gst_camerasrc_handle_event (camerasrc, event); + + if (!ret) { + ret = GST_BASE_SRC_CLASS (parent_class)->event (src, event); + } + + return ret; +} + + +/** + * gst_camerasrc_update_max_zoom: + * @camerasrc: #GstCameraSrc object. + * + * Check and update zoom property maximum value. + */ +static void +gst_camerasrc_update_max_zoom (GstCameraSrc * camerasrc) +{ + GstCameraSrcClass *bclass; + GObjectClass *oclass; + GParamSpec *pspec; + GParamSpecFloat *pspec_f; + gfloat maxzoom = 10.0; + + oclass = G_OBJECT_GET_CLASS (camerasrc); + bclass = GST_CAMERA_SRC_GET_CLASS (camerasrc); + pspec = g_object_class_find_property (oclass, "zoom"); + + if (bclass->get_max_zoom) { + if (!bclass->get_max_zoom (camerasrc, &maxzoom)) { + maxzoom = 10.0; + } + } + + /* Update gobject property */ + if (pspec && (G_PARAM_SPEC_VALUE_TYPE (pspec) == G_TYPE_FLOAT)) { + pspec_f = G_PARAM_SPEC_FLOAT (pspec); + pspec_f->maximum = maxzoom; + GST_DEBUG_OBJECT (camerasrc, "set maximum zoom as %f", pspec_f->maximum); + /* Check if new maximum zoom is lower than current zoom level */ + if (pspec_f->maximum < camerasrc->photoconf.zoom) { + GST_DEBUG_OBJECT (camerasrc, "current zoom level too high: %f", + camerasrc->photoconf.zoom); + g_object_set (G_OBJECT (camerasrc), "zoom", pspec_f->maximum, NULL); + } + } else { + GST_WARNING_OBJECT (camerasrc, "updating maximum zoom failed"); + } +} diff --git a/gst-libs/gst/camera/gstmfldcamerasrc.h b/gst-libs/gst/camera/gstmfldcamerasrc.h new file mode 100644 index 0000000..34be3e5 --- /dev/null +++ b/gst-libs/gst/camera/gstmfldcamerasrc.h @@ -0,0 +1,484 @@ +/* GStreamer + * + * Copyright (C) 2001-2002 Ronald Bultje + * 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * + * gstcamerasrc.h: Abstract camera source base class + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef __GST_CAMSRC_H__ +#define __GST_CAMSRC_H__ + +#ifndef GST_USE_UNSTABLE_API +#define GST_USE_UNSTABLE_API +#endif + +#include +#include +#include +#include +#include + +/** + * GST_CAMERA_SRC_MAX_SIZE: + * + * Maximum frame width or height size. + */ +#define GST_CAMERA_SRC_MAX_SIZE (1<<15) /* 2^15 == 32768 */ + +G_BEGIN_DECLS + +#define GST_TYPE_CAMERA_SRC (gst_camerasrc_get_type()) +#define GST_CAMERA_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_SRC,GstCameraSrc)) +#define GST_CAMERA_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_SRC,GstCameraSrcClass)) +#define GST_CAMERA_SRC_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_CAMERA_SRC,GstCameraSrcClass)) +#define GST_IS_CAMERA_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_SRC)) +#define GST_IS_CAMERA_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_SRC)) + +#define GST_CAMERA_SRC_CAST(obj) ((GstCameraSrc *)(obj)) +#define GST_CAMERA_SRC_CLASS_CAST(klass) ((GstCameraSrcClass *)(klass)) + +#define GST_TYPE_CAMERA_SRC_CAPTURE_MODE (gst_camerasrc_capture_mode_get_type()) +#define GST_TYPE_CAMERA_SRC_VIEWFINDER_MODE (gst_camerasrc_viewfinder_mode_get_type()) + +/** + * GST_CAMERA_SRC_CAPTURE_START: + * + * Message that is sent when capturing process is about to start. + */ +#define GST_CAMERA_SRC_CAPTURE_START "photo-capture-start" + +/** + * GST_CAMERA_SRC_CAPTURE_END: + * + * Message that is sent when capturing process has finished. + */ +#define GST_CAMERA_SRC_CAPTURE_END "photo-capture-end" + +/** + * GST_CAMERA_SRC_PREVIEW_IMAGE: + * + * This message is used to send the preview image to application. Message will + * contain one field called "buffer" which will hold a GstBuffer containing + * the preview image data. + */ +#define GST_CAMERA_SRC_PREVIEW_IMAGE "photo-capture-preview" + +/** +* GST_CAMERA_SRC_CAF_STATUS: +* +* Continuous autofocus algorithm status update message. +*/ +#define GST_CAMERA_SRC_CAF_STATUS "caf-update" + +typedef enum { + CAMERASRC_AUTO_FOCUS_RESULT_FOCUSED = 2, /**< Focused.*/ + CAMERASRC_AUTO_FOCUS_RESULT_FAILED, /**< AF failed.*/ + CAMERASRC_AUTO_FOCUS_RESULT_NUM, /**< Number of AF result*/ +}camerasrc_auto_focus_result_t; + + +/** + * GstCameraSrcCaptureMode: + * @GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER: Viewfinder is running. + * @GST_CAMERA_SRC_CAPTURE_MODE_STILL: Still image capture mode. + * @GST_CAMERA_SRC_CAPTURE_MODE_VIDEO: Video capture mode. + * + * Camera element capturing modes. + */ +typedef enum { + GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER, + GST_CAMERA_SRC_CAPTURE_MODE_STILL, + GST_CAMERA_SRC_CAPTURE_MODE_VIDEO +} GstCameraSrcCaptureMode; + + +/** + * GstCameraSrcViewfinderMode: + * @GST_CAMERA_SRC_VIEWFINDER_MODE_STILL: Still image capture VF mode. + * @GST_CAMERA_SRC_VIEWFINDER_MODE_VIDEO: Video capture VF mode. + * + * Defines in which mode viewfinder should be run. + */ +typedef enum { + GST_CAMERA_SRC_VIEWFINDER_MODE_STILL, + GST_CAMERA_SRC_VIEWFINDER_MODE_VIDEO +} GstCameraSrcViewfinderMode; + + +/** + * GstCameraSrcAFReqMode: + * @AF_NONE_REQUESTED: No change to AF mode requested. + * @AF_OFF_REQUESTED: AF should be turned off. + * @AF_ON_REQUESTED: AF should be turned on. + * + * Requested autofocus status. + */ +typedef enum { + AF_NONE_REQUESTED = 0, + AF_OFF_REQUESTED, + AF_ON_REQUESTED +} GstCameraSrcAFReqMode; + + +/** + * GstCameraCapturePhase: + * @GST_CAMERA_OFF: photo capturing functionality disabled. + * @GST_CAMERA_VIEWFINDER: element is running in viewfinder mode. 
+ * @GST_CAMERA_AUTOFOCUS: element is running autofocus phase. + * @GST_CAMERA_AUTOFOCUS_DONE: autofocus phase has ended. + * @GST_CAMERA_CAPTURE_START: element is preparing to capture HQ image. + * @GST_CAMERA_CAPTURE: element is capturing HQ image. + * @GST_CAMERA_CAPTURE_DONE: finishing HQ capture operation. + * + * Photo capturing phases. + */ +typedef enum { + GST_CAMERA_OFF = 0, + GST_CAMERA_VIEWFINDER, + GST_CAMERA_AUTOFOCUS, + GST_CAMERA_AUTOFOCUS_DONE, + GST_CAMERA_CAPTURE_START, + GST_CAMERA_CAPTURE, + GST_CAMERA_CAPTURE_DONE +} GstCameraCapturePhase; + + +typedef struct _GstCameraSrc GstCameraSrc; +typedef struct _GstCameraSrcClass GstCameraSrcClass; +typedef struct _GstCameraFocusStatus GstCameraFocusStatus; + + +typedef struct { + int x_left; + int x_right; + int y_top; + int y_bottom; + int weight; +} GstCameraSrc3a_window; + +typedef struct +{ + GstCameraControlExifInfo exif_info; + GstCameraSrc3a_window aaa_window; +} GstCameraControlInt; + + +/** + * GstCameraSrc: + * @element: the parent element. + * + * Opaque #GstCameraSrc object. + */ +struct _GstCameraSrc { + GstPushSrc element; + + /*< private >*/ + GstCaps *vf_caps; + GList *colors; + GList *camera_controls; + + /* TRUE if outgoing buffer should be always duplicated */ + gboolean always_copy; + + /* Current video device format */ + guint current_w, current_h; /* current capture frame size */ + guint fps_d, fps_n; /* framerate if device is open */ + guint32 current_fourcc; /* current color format */ + GstClockTime duration; /* duration of one frame */ + + /* Photo interface -related */ + GstPhotoSettings photoconf; /* Caches the settings */ + GMutex *state_lock; + GMutex *af_lock; + GstPhotoCapturePrepared prep_func; + gpointer prep_udata; + + /* camera control interface related */ + GstCameraControlInt cam_ctrl; + + /* Image capture format */ + guint capture_w, capture_h; + guint capture_fps_n, capture_fps_d; + guint32 capture_fourcc; + guint32 capture_count; + guint32 capture_counter; + gboolean req_negotiation; + gboolean signal_still_capture; + gboolean capture_resolution_set; /* Is capture resolution set already? */ + + /* Preview image format */ + guint preview_w, preview_h; + guint32 preview_fourcc; + gboolean preview_resolution_set; /* Is preview resolution set already? */ + + GstCameraSrcCaptureMode capture_mode; + GstCameraSrcViewfinderMode viewfinder_mode; + + /* Protected with state_lock */ + GstCameraCapturePhase photo_capture_phase; + + //guint frame_byte_size; + + guint8 requested_af_mode; + gboolean maker_note; + gboolean enable_torch; +}; + + +/** + * GstCameraSrcClass: + * @parent_class: the parent class structure. + * @open: Called when subclass should open the video device. + * @is_open: Called to get information if video device is currently open. + * @close: Called when subclass should close video device and free resources. + * @get_attribute: Called to get V4L2 attribute from device. + * @set_attribute: Called to set V4L2 attribute to device. + * @set_capture: Called to set VF/capture/preview resolution & format. + * @start: Capturing should start. + * @is_active: Ask subclass whether capturing is ongoing. + * @grab_frame: Ask the subclass to capture a frame. + * @stop: Subclass should stop capturing frames from device. + * @get_caps: Ask the subclass for supported output (viewfinder) caps. + * @get_num_buffers: Ask the subclass how many video buffers it is using. + * @unlock: Tell the subclass to stop any pending operation to video device. + * @unlock_stop: Clear previous unlock request. 
+ * @fill_image_tags: Called after image capturing to retrieve image metadata. + * @get_preview_image: Called after image capture to retrieve preview image. + * @event: Used for passing custom events to subclass. + * @get_capabilities: Ask what capabilities subclass has. + * @set_vfinder_mode: Set viewfinder mode. + * @set_capture_mode: Set capturing mode. + * @set_autofocus: Turn on / off autofocus algorithm. + * @set_autoexposure: Turn on / off auto exposure algorithm. + * @write_settings: Write all GstPhotoSettings to subclass at once. + * @read_settings: Read all device settings to given GstPhotoSettings structure. + * @set_zoom: Set the zoom factor. + * @get_max_zoom: Ask for maximum zoom factor that can be used. + * @get_supported_caps: Ask subclass about supported caps for given mode. + * @get_makernote: Get driver's makernote part + * @makernote_init: Init makernote + * @makernote_deal: The main part for deal with makernote + * @makernote_uninit: Uninit makernote + * #GstCameraSrc class object. + */ +struct _GstCameraSrcClass +{ + GstPushSrcClass parent_class; + + /*< public >*/ + /* virtual methods for subclasses */ + gboolean (*open) (GstCameraSrc *camsrc); + + gboolean (*is_open) (GstCameraSrc *camsrc); + + gboolean (*close) (GstCameraSrc *camsrc); + + /* attribute control */ + gboolean (*get_attribute) (GstCameraSrc *camsrc, int attribute, + int *value); + + gboolean (*set_attribute) (GstCameraSrc *camsrc, int attribute, + const int value); + + gboolean (*set_capture) (GstCameraSrc *camsrc, + GstOperationMode mode, + gboolean try_only, + guint32 *pixelformat, + guint *width, guint32 *height, + guint *fps_n, guint *fps_d); + + gboolean (*start) (GstCameraSrc *camsrc, GstCaps *caps); + + gboolean (*is_active) (GstCameraSrc *camsrc); + + GstFlowReturn (*grab_frame) (GstCameraSrc *camsrc, GstBuffer **buf, + GstCameraCapturePhase phase); + + gboolean (*stop) (GstCameraSrc *camsrc); + + GstCaps* (*get_caps) (GstCameraSrc *camsrc); + + guint (*get_num_buffers) (GstCameraSrc *camsrc); + + gboolean (*unlock) (GstCameraSrc *camsrc); + + gboolean (*unlock_stop) (GstCameraSrc *camsrc); + + gboolean (*fill_image_tags) (GstCameraSrc *camsrc, GstTagList *tlist); + + gboolean (*get_preview_image) (GstCameraSrc *camsrc, GstBuffer **buf); + + gboolean (*event) (GstCameraSrc *camsrc, GstEvent *event); + + /* FORMER DRIVER-API */ + + GstPhotoCaps + (*get_capabilities) (GstCameraSrc *camsrc); + + gboolean + (*set_vfinder_mode) (GstCameraSrc *camsrc, + GstCameraSrcViewfinderMode mode); + + gboolean + (*set_capture_mode) (GstCameraSrc *camsrc, + GstCameraSrcCaptureMode mode); + + gboolean + (*set_strobe_state) (GstCameraSrc *camsrc, gboolean state); + + + gboolean + (*set_autofocus) (GstCameraSrc *camsrc, gboolean on_off); + + gboolean + (*set_autoexposure) (GstCameraSrc *camsrc, gboolean on_off); + + gboolean + (*write_settings) (GstCameraSrc *camsrc, + GstPhotoSettings *photoconf, + gboolean scene_mode_override); + + gboolean + (*read_settings) (GstCameraSrc *camsrc, GstPhotoSettings *photoconf); + + gboolean + (*set_zoom) (GstCameraSrc *camsrc, gfloat zoomfactor); + + gboolean + (*set_AeAafwindow) (GstCameraSrc *camsrc, GstCameraSrc3a_window window); + + gboolean + (*set_ae_mode) (GstCameraSrc *camsrc, int mode); + + gboolean + (*get_ae_mode) (GstCameraSrc *camsrc, int *mode); + + gboolean + (*get_max_zoom) (GstCameraSrc *camsrc, gfloat *maxzoom); + + GstCaps * + (*get_supported_caps) (GstCameraSrc *camsrc, GstOperationMode mode); + + gboolean + (*get_makernote) (GstCameraSrc *camsrc, unsigned 
char *buf, unsigned size); + + gboolean + (*makernote_init)(GstCameraSrc * camsrc, + unsigned * buf_size, + unsigned num_afwindows, + unsigned num_faces, + unsigned num_eyes, + unsigned num_grid, + int *handle); + gboolean + (*makernote_deal)(GstCameraSrc * camsrc, + GstBuffer *pmakerbuf, + unsigned num_afwindows, + unsigned num_grid, + int handle); + gboolean + (*makernote_uninit)(GstCameraSrc * camsrc, int handle); + gboolean + (*read_exif) (GstCameraSrc *camsrc, + GstCameraControlExifInfo *exif_info); + + gboolean + (*set_flash_mode)(GstCameraSrc *camsrc, int value); + + /* signals */ + void (*nego_complete) (GstElement *element); + void (*still_capture) (GstElement *element, GstBuffer *main, GstBuffer *sub, GstBuffer *scrnl); + + + /*< private >*/ + gpointer _gst_reserved[GST_PADDING_LARGE]; +}; + + +/** + * GstCameraFocusStatus: + * @status: GstFocusStatus indicating the status of focus operation. + * @windows: Table of integers defining the focused windows. + * @num_windows: Number of items in the @windows table. + * @focus_rows: Number of rows in window matrix. + * @focus_columns: Number of columns in window matrix. + * @coverage: Percentage of viewfinder area being used for focusing. + */ +struct _GstCameraFocusStatus +{ + GstFocusStatus status; + guint *windows; + guint num_windows; + guint8 focus_rows; + guint8 focus_columns; + guint coverage; +}; + + +GType gst_camerasrc_get_type (void); + +void gst_camerasrc_add_color_channel (GstCameraSrc *camsrc, + GstColorBalanceChannel *channel); + +void gst_camerasrc_clear_color_channels (GstCameraSrc *camsrc); + + +void gst_camerasrc_add_cameracontrol_channel (GstCameraSrc *camsrc, + GstCameraControlChannel *channel); + +void gst_camerasrc_clear_cameracontrol_channels (GstCameraSrc *camsrc); + +void gst_camerasrc_send_capture_start_message (GstCameraSrc *camsrc); + +void gst_camerasrc_send_capture_stop_message (GstCameraSrc *camsrc); + +void gst_camerasrc_af_update (GstCameraSrc *camsrc, GstCameraFocusStatus *fs); + +void gst_camerasrc_caf_update (GstCameraSrc *camsrc, GstCameraFocusStatus *fs); + +void gst_camerasrc_shake_update (GstCameraSrc *camsrc, GstPhotoShakeRisk risk); + +GstCaps * +gst_camerasrc_get_caps_from_info (GstCameraSrc *camsrc, guint32 fourcc, + guint width, guint height, guint fps_n, guint fps_d); + +const gchar * gst_camerasrc_exposure_mode_from_exif_value (gint value); + +const gchar * gst_camerasrc_scene_capture_type_from_exif_value (gint value); + +const gchar * gst_camerasrc_gain_adjustment_from_exif_value (gint value); + +const gchar * gst_camerasrc_contrast_from_exif_value (gint value); + +const gchar * gst_camerasrc_saturation_from_exif_value (gint value); + +const gchar * gst_camerasrc_flash_mode_from_exif_value (gint value); + +const gchar * gst_camerasrc_sharpness_from_exif_value (gint value); + +const gchar * gst_camerasrc_metering_mode_from_exif_value (gint value); + +const gchar * gst_camerasrc_file_source_from_exif_value (gint value); + +int gst_camerasrc_send_af_status(GstCameraSrc *camsrc , int state); + +G_END_DECLS + +#endif /* __GST_CAMSRC_H__ */ diff --git a/gst/Makefile.am b/gst/Makefile.am index fbe914b..bdc615b 100644 --- a/gst/Makefile.am +++ b/gst/Makefile.am @@ -1,2 +1,2 @@ -SUBDIRS = v4l2newcam -DIST_SUBDIRS= v4l2newcam +SUBDIRS = mfldv4l2cam +DIST_SUBDIRS= mfldv4l2cam diff --git a/gst/mfldv4l2cam/Makefile.am b/gst/mfldv4l2cam/Makefile.am new file mode 100644 index 0000000..8ac1260 --- /dev/null +++ b/gst/mfldv4l2cam/Makefile.am @@ -0,0 +1,32 @@ +plugin_LTLIBRARIES = libgstmfldv4l2cam.la + 
+libgstmfldv4l2camincludedir = \
+    $(includedir)/gstreamer-@GST_MAJORMINOR@/gst
+
+libgstmfldv4l2cam_la_SOURCES = gstv4l2camvidorient.c \
+    gstv4l2camsrc.c \
+    v4l2camsrc_calls.c
+
+libgstmfldv4l2cam_la_CFLAGS = -I$(top_builddir)/gst-libs \
+    -I$(top_builddir)/gst-libs/atomisphal \
+    $(GST_CFLAGS) \
+    $(GST_BASE_CFLAGS) \
+    $(GST_CONTROLLER_CFLAGS) \
+    $(GST_BAD_CFLAGS) \
+    $(LIBMFLDADVCI_CFLAGS) \
+    -DGST_USE_UNSTABLE_API
+
+libgstmfldv4l2cam_la_LIBADD = $(top_builddir)/gst-libs/gst/camera/libgstmfldcamera-$(GST_MAJORMINOR).la \
+    $(top_builddir)/gst-libs/atomisphal/libgstatomisphal-$(GST_MAJORMINOR).la \
+    $(GST_LIBS) \
+    $(GST_BASE_LIBS) \
+    $(GST_CONTROLLER_LIBS) \
+    $(GST_BAD_LIBS) \
+    -lgstinterfaces-$(GST_MAJORMINOR) \
+    -lgstphotography-$(GST_MAJORMINOR)
+
+
+libgstmfldv4l2cam_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBMFLDADVCI_LIBS)
+libgstmfldv4l2cam_la_LIBTOOLFLAGS = --tag=disable-static
+
+noinst_HEADERS = v4l2camsrc_calls.h gstv4l2camvidorient.h
diff --git a/gst/mfldv4l2cam/gstv4l2camsrc.c b/gst/mfldv4l2cam/gstv4l2camsrc.c
new file mode 100644
index 0000000..dcdee0e
--- /dev/null
+++ b/gst/mfldv4l2cam/gstv4l2camsrc.c
@@ -0,0 +1,1874 @@
+/* GStreamer V4L2 camera source
+ * Copyright (C) 2010 Nokia Corporation
+ *               2010 Intel Corporation
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-mfldcamsrc
+ * @short_description: Medfield V4L2 camera source
+ * @see_also: #GstCameraSrc
+ *
+ * mfldv4l2camsrc captures viewfinder frames, video and still images from
+ * the Intel Medfield (Atom ISP) camera through the V4L2 API. It is a
+ * subclass of #GstCameraSrc.
+ *
+ * Example launch line
+ *
+ * gst-launch mfldv4l2camsrc ! 
xvimagesink + * + * + * + */ + +#include + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include "gstv4l2camsrc.h" +#include "v4l2camsrc_calls.h" +#include "gstv4l2camvidorient.h" +#include + +#define MFLD_ADVCI_PATH "/usr/lib/" +#define MFLD_V4L2CAMSRC_VERSION "85de990a519ae021f0bf4ec89c0e352ec76f6965" +#define FOCUS_POSITION_MIN 0 +#define FOCUS_POSITION_MAX 512 + +GST_DEBUG_CATEGORY (gst_v4l2camsrc_debug); +#define GST_CAT_DEFAULT gst_v4l2camsrc_debug + +static gboolean gst_v4l2camsrc_is_open (GstCameraSrc * camsrc); +static void gst_v4l2camsrc_finalize (GObject * object); +static void gst_v4l2camsrc_dispose (GObject * object); +static void gst_v4l2camsrc_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_v4l2camsrc_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); +static gboolean gst_v4l2camsrc_unlock (GstCameraSrc * src); +static gboolean gst_v4l2camsrc_unlock_stop (GstCameraSrc * src); +static gboolean gst_v4l2camsrc_set_zoom (GstCameraSrc * camsrc, gfloat zoom); +static gboolean gst_v4l2camsrc_set_ae_mode (GstCameraSrc * camsrc, int mode); +static gboolean gst_v4l2camsrc_get_ae_mode (GstCameraSrc * camsrc, int *mode); + +static gboolean gst_v4l2camsrc_iface_supported (GstImplementsInterface * iface, + GType iface_type); +static gboolean gst_v4l2camsrc_get_makernote (GstCameraSrc * camsrc, unsigned char *buf, unsigned size); +static gboolean gst_v4l2camsrc_makernote_init(GstCameraSrc * camsrc, + unsigned * buf_size, + unsigned num_afwindows, + unsigned num_faces, + unsigned num_eyes, + unsigned num_grid, + int *handle); +static gboolean gst_v4l2camsrc_makernote_deal(GstCameraSrc * camsrc, + GstBuffer *pmakerbuf, + unsigned num_afwindows, + unsigned num_grid, + int handle); +static gboolean gst_v4l2camsrc_makernote_uninit(GstCameraSrc * camsrc, + int handle); + +typedef enum +{ + PROP_0, + PROP_DEVICE, + PROP_DEVICE_FD, + PROP_DEVICE_NAME, + PROP_QUEUE_SIZE, + PROP_INPUT_SENSOR, + PROP_USE_MMAP, + PROP_USE_COPY, + PROP_AE, + PROP_AE_METERING_MODE, + PROP_AE_WINDOW, + PROP_AE_MODE, + PROP_AF, + PROP_AF_METERING_MODE, + PROP_AF_WINDOW, + PROP_AWB, + PROP_STILL_AF, + PROP_FOCUS_POSITION, + PROP_VFLIP, + PROP_HFLIP, + PROP_BAYER_DOWNSCALING, + PROP_CAPTURE_CORRECTION_GDC, + PROP_CAPTURE_CORRECTION_CAC, + PROP_CAPTURE_CORRECTION_RER, + PROP_CAPTURE_CORRECTION_DIS, + PROP_CAPTURE_CORRECTION_DVS, + PROP_CAPTURE_CORRECTION_EDGE_ENHANCEMENT, + PROP_CAPTURE_CORRECTION_SHADING_CORRECTION, + PROP_CAPTURE_CORRECTION_BLACK_LEVEL_COMPENSATION, + PROP_CAPTURE_CORRECTION_BAD_PIXEL_DETECTION, + PROP_CAPTURE_CORRECTION_GAMMA, + PROP_CAPTURE_CORRECTION_CONTRAST, + PROP_CAPTURE_CORRECTION_BRIGHTNESS, + PROP_DUMP_RAW, + PROP_DUMP_IMAGE, + PROP_DEBUG_FLAGS, + PROP_DISABLE_LOW_RES_CROP, +} GstV4L2CamSrcProperties; + + + + +#define DEFAULT_PROP_DEVICE_NAME NULL +#define DEFAULT_PROP_DEVICE "/dev/video0" +#define DEFAULT_PROP_DEVICE_FD -1 +#define DEFAULT_PROP_AE_WINDOW "x_left=0,x_right=0,y_bottom=0,y_top=0" +#define DEFAULT_PROP_AF_WINDOW "x_left=0,x_right=0,y_bottom=0,y_top=0" +#define DEFAULT_DEBUG_FLAGS 0 +#define C_FLAGS(v) ((guint) v) + + +static const char* surface_string = + "video/x-vaapi-sharing, " + "type = vaapi, " + "width = (int) [ 1, MAX ], " + "height = (int) [ 1, MAX ], " + "framerate = (fraction) [ 0, MAX ]"; + + +GType +gst_camera_input_sensor_get_type (void) +{ + static GType gst_camera_input_sensor_type = 0; + static GEnumValue gst_camera_input_sensors[] = { + 
{GST_CAMERA_INPUT_SENSOR_PRIMARY, "Primary Sensor", "primary"}, + {GST_CAMERA_INPUT_SENSOR_SECONDARY, "Sencondary Sensor", "second"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camera_input_sensor_type)) { + gst_camera_input_sensor_type = + g_enum_register_static ("GstCameraInputSensor", + gst_camera_input_sensors); + } + return gst_camera_input_sensor_type; +} + +GType +gst_camera_ae_metering_mode_get_type(void) +{ + static GType gst_camera_ae_metering_mode_type = 0; + static GEnumValue gst_camera_ae_metering_mode[] = { + {GST_CAMERA_AE_METERING_AUTO, "AE auto metering", "auto"}, + {GST_CAMERA_AE_METERING_SPOT, "AE spot metering", "spot"}, + {GST_CAMERA_AE_METERING_CENTER, "AE center metering", "center"}, + {GST_CAMERA_AE_METERING_CUSTOMIZED, "AE customized metering", "customized"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camera_ae_metering_mode_type)) { + gst_camera_ae_metering_mode_type= + g_enum_register_static ("GstCameraAEMeteringMode", + gst_camera_ae_metering_mode); + } + return gst_camera_ae_metering_mode_type; +} + +GType +gst_camera_ae_mode_get_type(void) +{ + static GType gst_camera_ae_mode_type = 0; + static GEnumValue gst_camera_ae_mode[] = { + {GST_CAMERA_AE_MODE_AUTO, "AE auto", "auto"}, + {GST_CAMERA_AE_MODE_MANUAL, "AE manual", "manual"}, + {GST_CAMERA_AE_MODE_SHUTTER_PRIORITY, "AE shutter priority", "shutter"}, + {GST_CAMERA_AE_MODE_APERTURE_PRIORITY, "AE aperture priority", "aperture"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camera_ae_mode_type)) { + gst_camera_ae_mode_type= + g_enum_register_static ("GstCameraAEMode", + gst_camera_ae_mode); + } + return gst_camera_ae_mode_type; +} + +GType +gst_camera_af_metering_mode_get_type(void) +{ + static GType gst_camera_af_metering_mode_type = 0; + static GEnumValue gst_camera_af_metering_mode[] = { + {GST_CAMERA_AF_METERING_AUTO, "AF auto metering", "auto"}, + {GST_CAMERA_AF_METERING_SPOT, "AF spot metering", "spot"}, + {GST_CAMERA_AF_METERING_CENTER, "AF center metering", "center"}, + {GST_CAMERA_AF_METERING_CUSTOMIZED, "AF customized metering", "customized"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camera_af_metering_mode_type)) { + gst_camera_af_metering_mode_type= + g_enum_register_static ("GstCameraAFMeteringMode", + gst_camera_af_metering_mode); + } + return gst_camera_af_metering_mode_type; +} + +GType +gst_camerasrc_debug_flags_get_type (void) +{ + static GType gst_camerasrc_debug_flags = 0; + static const GFlagsValue values [] = { + {C_FLAGS (GST_CAMERASRC_DEBUG_FLAGS_PERFORMANCE), "Debug flags for performance tuning", + "performance"}, + {C_FLAGS (GST_CAMERASRC_DEBUG_FLAGS_MAKER_NOTE), "Debug flags for maker note", + "maker-note"}, + {C_FLAGS (GST_CAMERASRC_DEBUG_FLAGS_AUTO_FOCUS), "Debug flags for auto focus", + "auto-focus"}, + {0, NULL, NULL}, + }; + + if (G_UNLIKELY (!gst_camerasrc_debug_flags)) { + gst_camerasrc_debug_flags = + g_flags_register_static ("GstCameraSrcDebugFlags", values); + } + return gst_camerasrc_debug_flags; +} + + + +GST_IMPLEMENT_V4L2CAMSRC_VIDORIENT_METHODS (GstMFLDV4l2CamSrc, gst_v4l2camsrc); + + +static void +gst_v4l2camsrc_interface_init (GstImplementsInterfaceClass * klass) +{ + /* + * default virtual functions + */ + klass->supported = gst_v4l2camsrc_iface_supported; +} + +void +gst_v4l2camsrc_init_interfaces (GType type) +{ + static const GInterfaceInfo v4l2camsrc_iface_info = { + (GInterfaceInitFunc) gst_v4l2camsrc_interface_init, + NULL, + NULL, + }; + static const GInterfaceInfo v4l2camsrc_videoorientation_info = { + (GInterfaceInitFunc) 
gst_v4l2camsrc_video_orientation_interface_init, + NULL, + NULL, + }; + + g_type_add_interface_static (type, + GST_TYPE_IMPLEMENTS_INTERFACE, &v4l2camsrc_iface_info); + g_type_add_interface_static (type, + GST_TYPE_VIDEO_ORIENTATION, &v4l2camsrc_videoorientation_info); +} + + +GST_BOILERPLATE_FULL (GstMFLDV4l2CamSrc, gst_v4l2camsrc, GstCameraSrc, + GST_TYPE_CAMERA_SRC, gst_v4l2camsrc_init_interfaces); + + + +static gboolean +gst_v4l2camsrc_iface_supported (GstImplementsInterface * iface, + GType iface_type) +{ + GstCameraSrc *camsrc = GST_CAMERA_SRC (iface); + + if (gst_v4l2camsrc_is_open (camsrc) && + iface_type == GST_TYPE_VIDEO_ORIENTATION) { + return TRUE; + } + + else if (GST_IS_IMPLEMENTS_INTERFACE (camsrc)) { + GstImplementsInterfaceClass *parent_klass; + + parent_klass = + g_type_interface_peek (parent_class, GST_TYPE_IMPLEMENTS_INTERFACE); + return parent_klass->supported (iface, iface_type); + } + + return FALSE; +} + + +/* + * gst_v4l2camsrc_is_open: + * + */ +static gboolean +gst_v4l2camsrc_is_open (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + return GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc); +} + +/* + * gst_v4l2camsrc_is_active: + * + */ +static gboolean +gst_v4l2camsrc_is_active (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + return GST_V4L2CAMSRC_IS_ACTIVE (v4l2camsrc); +} + +/* + * gst_v4l2camsrc_v4l2fourcc_to_structure: + * + */ +static GstStructure * +gst_v4l2camsrc_v4l2fourcc_to_structure (guint32 fourcc) +{ + GstStructure *structure = NULL; + + switch (fourcc) { + case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */ + case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */ + structure = gst_structure_new ("image/jpeg", NULL); + break; + case V4L2_PIX_FMT_RGB332: + case V4L2_PIX_FMT_RGB555: + case V4L2_PIX_FMT_RGB555X: + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_RGB565X: + case V4L2_PIX_FMT_RGB24: + case V4L2_PIX_FMT_BGR24: + case V4L2_PIX_FMT_RGB32: + case V4L2_PIX_FMT_BGR32:{ + guint depth = 0, bpp = 0; + + gint endianness = 0; + + guint32 r_mask = 0, b_mask = 0, g_mask = 0; + + switch (fourcc) { + case V4L2_PIX_FMT_RGB332: + bpp = depth = 8; + endianness = G_BYTE_ORDER; /* 'like, whatever' */ + r_mask = 0xe0; + g_mask = 0x1c; + b_mask = 0x03; + break; + case V4L2_PIX_FMT_RGB555: + case V4L2_PIX_FMT_RGB555X: + bpp = 16; + depth = 15; + endianness = + fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN; + r_mask = 0x7c00; + g_mask = 0x03e0; + b_mask = 0x001f; + break; + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_RGB565X: + bpp = depth = 16; + endianness = + fourcc == V4L2_PIX_FMT_RGB565X ? 
G_BIG_ENDIAN : G_LITTLE_ENDIAN; + r_mask = 0xf800; + g_mask = 0x07e0; + b_mask = 0x001f; + break; + case V4L2_PIX_FMT_RGB24: + bpp = depth = 24; + endianness = G_BIG_ENDIAN; + r_mask = 0xff0000; + g_mask = 0x00ff00; + b_mask = 0x0000ff; + break; + case V4L2_PIX_FMT_BGR24: + bpp = depth = 24; + endianness = G_BIG_ENDIAN; + r_mask = 0x0000ff; + g_mask = 0x00ff00; + b_mask = 0xff0000; + break; + case V4L2_PIX_FMT_RGB32: + bpp = depth = 32; + endianness = G_BIG_ENDIAN; + r_mask = 0xff000000; + g_mask = 0x00ff0000; + b_mask = 0x0000ff00; + break; + case V4L2_PIX_FMT_BGR32: + bpp = depth = 32; + endianness = G_BIG_ENDIAN; + r_mask = 0x000000ff; + g_mask = 0x0000ff00; + b_mask = 0x00ff0000; + break; + default: + g_assert_not_reached (); + break; + } + structure = gst_structure_new ("video/x-raw-rgb", + "bpp", G_TYPE_INT, bpp, + "depth", G_TYPE_INT, depth, + "red_mask", G_TYPE_INT, r_mask, + "green_mask", G_TYPE_INT, g_mask, + "blue_mask", G_TYPE_INT, b_mask, + "endianness", G_TYPE_INT, endianness, NULL); + break; + } + case V4L2_PIX_FMT_GREY: /* 8 Greyscale */ + structure = gst_structure_new ("video/x-raw-gray", + "bpp", G_TYPE_INT, 8, NULL); + break; + case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */ + case V4L2_PIX_FMT_HI240: /* 8 8-bit color */ + /* FIXME: get correct fourccs here */ + break; + case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */ + case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */ + case V4L2_PIX_FMT_NV61: + case V4L2_PIX_FMT_NV16: + case V4L2_PIX_FMT_YVU410: + case V4L2_PIX_FMT_YUV410: + case V4L2_PIX_FMT_YUV420: /* I420/IYUV */ + case V4L2_PIX_FMT_YUYV: + case V4L2_PIX_FMT_YVU420: + case V4L2_PIX_FMT_UYVY: + case V4L2_PIX_FMT_Y41P: + case V4L2_PIX_FMT_YUV422P: + case V4L2_PIX_FMT_YUV444: +#ifdef V4L2_PIX_FMT_YVYU + case V4L2_PIX_FMT_YVYU: +#endif + case V4L2_PIX_FMT_YUV411P:{ + guint32 fcc = 0; + + switch (fourcc) { + case V4L2_PIX_FMT_NV12: + fcc = GST_MAKE_FOURCC ('N', 'V', '1', '2'); + break; + case V4L2_PIX_FMT_NV21: + fcc = GST_MAKE_FOURCC ('N', 'V', '2', '1'); + break; + case V4L2_PIX_FMT_NV16: + fcc = GST_MAKE_FOURCC ('N', 'V', '1', '6'); + break; + case V4L2_PIX_FMT_NV61: + fcc = GST_MAKE_FOURCC ('N', 'V', '6', '1'); + break; + case V4L2_PIX_FMT_YVU410: + fcc = GST_MAKE_FOURCC ('Y', 'V', 'U', '9'); + break; + case V4L2_PIX_FMT_YUV410: + fcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9'); + break; + case V4L2_PIX_FMT_YUV420: + fcc = GST_MAKE_FOURCC ('I', '4', '2', '0'); + break; + case V4L2_PIX_FMT_YUYV: + fcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'); + break; + case V4L2_PIX_FMT_YVU420: + fcc = GST_MAKE_FOURCC ('Y', 'V', '1', '2'); + break; + case V4L2_PIX_FMT_UYVY: + fcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'); + break; + case V4L2_PIX_FMT_Y41P: + fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'P'); + break; + case V4L2_PIX_FMT_YUV411P: + fcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B'); + break; + case V4L2_PIX_FMT_YUV422P: + fcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B'); + break; + case V4L2_PIX_FMT_YUV444: + fcc = GST_MAKE_FOURCC ('Y', '4', '4', '4'); + break; +#ifdef V4L2_PIX_FMT_YVYU + case V4L2_PIX_FMT_YVYU: + fcc = GST_MAKE_FOURCC ('Y', 'V', 'Y', 'U'); + break; +#endif + default: + g_assert_not_reached (); + break; + } + structure = gst_structure_new ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, fcc, NULL); + break; + } + case V4L2_PIX_FMT_SBGGR8: + case V4L2_PIX_FMT_SGBRG8: + case V4L2_PIX_FMT_SGRBG8: + case V4L2_PIX_FMT_SRGGB8: + case V4L2_PIX_FMT_SBGGR10: + case V4L2_PIX_FMT_SGBRG10: + case V4L2_PIX_FMT_SGRBG10: + case V4L2_PIX_FMT_SRGGB10: +#ifdef V4L2_PIX_FMT_SBGGR16 + case V4L2_PIX_FMT_SBGGR16:{ 
+#endif + guint32 fcc = 0; + switch (fourcc) { + case V4L2_PIX_FMT_SBGGR8: + fcc = GST_MAKE_FOURCC ('B', 'A', '8', '1'); + break; + case V4L2_PIX_FMT_SGBRG8: + fcc = GST_MAKE_FOURCC ('G', 'B', 'R', 'G'); + break; + case V4L2_PIX_FMT_SGRBG8: + fcc = GST_MAKE_FOURCC ('G', 'R', 'B', 'G'); + break; + case V4L2_PIX_FMT_SRGGB8: + fcc = GST_MAKE_FOURCC ('R', 'G', 'G', 'B'); + break; + case V4L2_PIX_FMT_SBGGR10: + fcc = GST_MAKE_FOURCC ('B', 'G', '1', '0'); + break; + case V4L2_PIX_FMT_SGBRG10: + fcc = GST_MAKE_FOURCC ('G', 'B', '1', '0'); + break; + case V4L2_PIX_FMT_SGRBG10: + fcc = GST_MAKE_FOURCC ('B', 'A', '1', '0'); + break; + case V4L2_PIX_FMT_SRGGB10: + fcc = GST_MAKE_FOURCC ('R', 'G', '1', '0'); + break; + case V4L2_PIX_FMT_SBGGR16: + fcc = GST_MAKE_FOURCC ('B', 'Y', 'R', '2'); + break; + default: + g_assert_not_reached (); + break; + } + structure = gst_structure_new ("video/x-raw-bayer", + "format", GST_TYPE_FOURCC, fcc, NULL); + break; + } + default: + GST_DEBUG ("Unknown fourcc 0x%08x %" GST_FOURCC_FORMAT, + fourcc, GST_FOURCC_ARGS (fourcc)); + break; + } + + return structure; +} + +/* + * gst_v4l2camsrc_get_caps: + * + */ +static GstCaps * +gst_v4l2camsrc_get_caps (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GstStructure *template; + GstCaps *ret; + GSList *walk; + + if (!gst_v4l2camsrc_is_open (camsrc)) { + /* FIXME: should it probe the device? */ + return NULL; + } + + if (!v4l2camsrc->formats) + gst_v4l2camsrc_fill_format_list (v4l2camsrc); + + ret = gst_caps_new_empty (); + + for (walk = v4l2camsrc->formats; walk; walk = walk->next) { + struct v4l2_fmtdesc *format; + + /* FIXME: Introduce own format structure */ + format = (struct v4l2_fmtdesc *) walk->data; + + template = gst_v4l2camsrc_v4l2fourcc_to_structure (format->pixelformat); + + if (template) { + GstCaps *tmp; + + tmp = gst_v4l2camsrc_probe_caps_for_format (v4l2camsrc, + format->pixelformat, template); + if (tmp) + gst_caps_append (ret, tmp); + + gst_structure_free (template); + } else { + GST_DEBUG_OBJECT (v4l2camsrc, "unknown format %u", format->pixelformat); + } + } + + GstStructure * structure = gst_structure_from_string(surface_string, NULL); + gst_caps_append_structure (ret, structure); + + v4l2camsrc->probed_caps = gst_caps_ref (ret); + + GST_INFO_OBJECT(v4l2camsrc, "use GST_DEBUG >= 5 for probed caps"); + GST_LOG_OBJECT (v4l2camsrc, "probed caps: %" GST_PTR_FORMAT, ret); + + return ret; +} + +/* + * gst_v4l2camsrc_get_num_buffers: + * + */ +static guint +gst_v4l2camsrc_get_num_buffers (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + return v4l2camsrc->num_buffers; +} + +/* + * common format / caps utilities: + */ +typedef struct +{ + guint32 format; + gboolean dimensions; +} GstV4L2FormatDesc; + +static const GstV4L2FormatDesc gst_v4l2_formats[] = { + /* from Linux 2.6.15 videodev2.h */ + {V4L2_PIX_FMT_YUV420, TRUE}, + {V4L2_PIX_FMT_YVU420, TRUE}, + {V4L2_PIX_FMT_YUV422P, TRUE}, + {V4L2_PIX_FMT_YUV444, TRUE}, + + {V4L2_PIX_FMT_NV12, TRUE}, + {V4L2_PIX_FMT_NV21, TRUE}, + + {V4L2_PIX_FMT_NV16, TRUE}, + {V4L2_PIX_FMT_NV61, TRUE}, + + {V4L2_PIX_FMT_YUYV, TRUE}, + {V4L2_PIX_FMT_UYVY, TRUE}, + + {V4L2_PIX_FMT_SBGGR16, TRUE}, + + {V4L2_PIX_FMT_SBGGR8, TRUE}, + {V4L2_PIX_FMT_SGBRG8, TRUE}, + {V4L2_PIX_FMT_SGRBG8, TRUE}, + {V4L2_PIX_FMT_SRGGB8, TRUE}, + + {V4L2_PIX_FMT_SBGGR10, TRUE}, + {V4L2_PIX_FMT_SGBRG10, TRUE}, + {V4L2_PIX_FMT_SGRBG10, TRUE}, + {V4L2_PIX_FMT_SRGGB10, TRUE}, + + {V4L2_PIX_FMT_RGB24, TRUE}, + {V4L2_PIX_FMT_RGB32, TRUE}, 
+ {V4L2_PIX_FMT_RGB565, TRUE}, +}; + +#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats)) +#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */ + +GstCaps * +gst_v4l2camsrc_get_all_caps (void) +{ + static GstCaps *caps = NULL; + + if (caps == NULL) { + GstStructure *structure; + + guint i; + + caps = gst_caps_new_empty (); + for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) { + structure = + gst_v4l2camsrc_v4l2fourcc_to_structure (gst_v4l2_formats[i].format); + if (structure) { + if (gst_v4l2_formats[i].dimensions) { + gst_structure_set (structure, + "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE, + "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE, + "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL); + } + gst_caps_append_structure (caps, structure); + } + } + structure = gst_structure_from_string(surface_string, NULL); + gst_caps_append_structure (caps, structure); + } + + return gst_caps_ref (caps); +} + + +/* + * gst_v4l2camsrc_base_init: + * @klass: #GstElementClass. + * + */ +static void +gst_v4l2camsrc_base_init (gpointer klass) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + + gst_element_class_set_details_simple (element_class, + "V4L2 camera source", + "Video/Src", + "Video4Linux2 camera source element", + "Maemo Multimedia "); + + gst_element_class_add_pad_template (element_class, + gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, + gst_v4l2camsrc_get_all_caps ())); +} + +gboolean gst_v4l2camsrc_set_autofocus (GstCameraSrc * camsrc, gboolean on); +gboolean gst_v4l2camsrc_set_autoexposure (GstCameraSrc * camsrc, gboolean on); +GstPhotoCaps gst_v4l2camsrc_get_capabilities (GstCameraSrc * camsrc); +gboolean gst_v4l2camsrc_set_capture_mode (GstCameraSrc * camsrc, + GstCameraSrcCaptureMode mode); +gboolean +gst_v4l2camsrc_read_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf); +gboolean gst_v4l2camsrc_write_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf, gboolean scene_override); +gboolean +gst_v4l2camsrc_set_flash_mode (GstCameraSrc *camsrc, int value); +gboolean +gst_v4l2camsrc_read_exif (GstCameraSrc *camsrc, + GstCameraControlExifInfo *exif_info); +gboolean +gst_v4l2camsrc_set_strobe_state (GstCameraSrc *camsrc, gboolean state); + +gboolean +gst_v4l2camsrc_set_AeAafwindow (GstCameraSrc * camsrc, GstCameraSrc3a_window window); + +/* + * gst_v4l2camsrc_class_init: + * @klass: #GstMFLDV4l2CamSrcClass. 
+ * + */ +static void +gst_v4l2camsrc_class_init (GstMFLDV4l2CamSrcClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstCameraSrcClass *camera_class = GST_CAMERA_SRC_CLASS (klass); + + gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_property); + gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_property); + gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_dispose); + gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_finalize); + + g_object_class_install_property (gobject_class, PROP_DEVICE, + g_param_spec_string ("device", "Device", "Device location", + DEFAULT_PROP_DEVICE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_DEVICE_NAME, + g_param_spec_string ("device-name", "Device name", + "Name of the device", DEFAULT_PROP_DEVICE_NAME, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_DEVICE_FD, + g_param_spec_int ("device-fd", "File descriptor", + "File descriptor of the device", -1, G_MAXINT, DEFAULT_PROP_DEVICE_FD, + G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_QUEUE_SIZE, + g_param_spec_uint ("queue-size", "Queue size", + "Number of buffers to be enqueud in the driver", + GST_V4L2CAMSRC_MIN_BUFFERS, GST_V4L2CAMSRC_MAX_BUFFERS, + GST_V4L2CAMSRC_DEFAULT_BUFFERS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_INPUT_SENSOR, + g_param_spec_enum ("camera-id", "Camera Id", + "Which sensor is the input of the ISP", + GST_TYPE_CAMERA_INPUT_SENSOR, + GST_CAMERA_INPUT_SENSOR_PRIMARY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_USE_MMAP, + g_param_spec_boolean ("use-mmap", "Use Mmap", + "Whether use mmap memory method", FALSE, G_PARAM_READWRITE)); + + g_object_class_install_property (gobject_class, PROP_USE_COPY, + g_param_spec_boolean ("use-copy", "Use Copy", + "Whether copy the buffer from driver, debug only", FALSE, G_PARAM_READWRITE)); + + /* AE, AF, and AWB settings */ + g_object_class_install_property (gobject_class, PROP_AE, + g_param_spec_boolean ("ae", "Auto Exposure", + "Auto Exposure is On or Off", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_AE_METERING_MODE, + g_param_spec_enum ("ae-metering-mode", "AE Metering Mode", + "Select AE Metering Mode", + GST_TYPE_CAMERA_AE_METERING_MODE, + GST_CAMERA_AE_METERING_AUTO, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_AE_MODE, + g_param_spec_enum ("ae-mode", "AE Mode", + "Select AE Mode", + GST_TYPE_CAMERA_AE_MODE, + GST_CAMERA_AE_MODE_AUTO, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_AE_WINDOW, + g_param_spec_string("ae-window", "AE Window", + "Set AE Window Coordinates in format: x_left=value,x_right=value," + "y_bottom=value,y_top=value", + DEFAULT_PROP_AE_WINDOW, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS )); + + + g_object_class_install_property (gobject_class, PROP_AF, + g_param_spec_boolean ("af", "Auto Focus", + "Auto Focus is On or Off", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_AF_METERING_MODE, + g_param_spec_enum ("af-metering-mode", "AF Metering Mode", + "Select AF Metering Mode", + GST_TYPE_CAMERA_AF_METERING_MODE, + 
GST_CAMERA_AF_METERING_AUTO, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, PROP_AF_WINDOW, + g_param_spec_string("af-window", "AF Window", + "Set AF Window Coordinates in format: x_left=value,x_right=value," + "y_bottom=value,y_top=value", + DEFAULT_PROP_AF_WINDOW, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS )); + + g_object_class_install_property (gobject_class, PROP_AWB, + g_param_spec_boolean ("awb", "White Balance", + "White Balance is On or Off", + TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_STILL_AF, + g_param_spec_boolean ("still-af", "still image slow focus", + "Turn On or Off slow focus when doing the still image capture", + TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_FOCUS_POSITION, + g_param_spec_int ("focus-posi", "Focus Position", + "Focus absolute position set to Sensor.", FOCUS_POSITION_MIN, + FOCUS_POSITION_MAX, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_BAYER_DOWNSCALING, + g_param_spec_boolean ("bayer-downscaling", "Bayer Downscaling crop", + "Turn On or Off Bayer Downscaling", TRUE, G_PARAM_READWRITE)); + + /* These are advanced ISP features for MFLD camera only */ + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_GDC, + g_param_spec_boolean ("GDC", "GDC", + "Capture Correction for Lens Geometry Distortion Correction", + FALSE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_CAC, + g_param_spec_boolean ("CAC", "CAC", + "Capture Correction for Chromatic Aberration Correction", + FALSE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_RER, + g_param_spec_boolean ("redeye-reduction", "Redeye reduction", + "Capture Correction for Redeye reduction", + FALSE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_DIS, + g_param_spec_boolean ("still-stable", "Still stabilization", + "Capture Correction for still image stabilization", FALSE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_DVS, + g_param_spec_boolean ("video-stable", "Video stabilization", + "Capture Correction for video capture stabilization", FALSE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_EDGE_ENHANCEMENT, + g_param_spec_boolean ("edge-enhancement", "Edge Enhancement", + "Capture Correction for edge enhancement", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_SHADING_CORRECTION, + g_param_spec_boolean ("shading-correction", "Shading Correction", + "Capture Correction for shading correction", TRUE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_BLACK_LEVEL_COMPENSATION, + g_param_spec_boolean ("black-level-compensation", "Black Level Compensation", + "Capture Correction for Black Level Compensation", FALSE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_BAD_PIXEL_DETECTION, + g_param_spec_boolean 
("bad-pixel-detection", "Bad Pixel Detection", + "Capture Correction for Bad Pixel Detection", TRUE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_CAPTURE_CORRECTION_GAMMA, + g_param_spec_float ("gamma", "Gamma", + "Gamma Values", 1.0, 2.4, 2.2, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_CONTRAST, g_param_spec_int ("contrast", + "Contrast", "Contrast Values", 0, 2048, 256, + G_PARAM_READWRITE |G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_CAPTURE_CORRECTION_BRIGHTNESS, g_param_spec_int ("brightness", + "Brightness", "Brightness Values", -255, 255, 0, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, PROP_DUMP_RAW, + g_param_spec_boolean ("dump-raw", "Dump RAW images simultanious", + "Whether dump the raw images as output when during the jpeg capture", + FALSE, G_PARAM_READWRITE)); + + g_object_class_install_property (gobject_class, PROP_DUMP_IMAGE, + g_param_spec_boolean ("dump-image", "Dump images simultanious in pipeline", + "Whether dump the images as output in pipeline, debug only," + "output the image in current directory", + FALSE, G_PARAM_READWRITE)); + + g_object_class_install_property (gobject_class, PROP_DEBUG_FLAGS, + g_param_spec_flags ("debug-flags", "debug flags", + "debug flags for development and performance tuning usage", + GST_TYPE_CAMERASRC_DEBUG_FLAGS, DEFAULT_DEBUG_FLAGS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + g_object_class_install_property (gobject_class, + PROP_VFLIP, + g_param_spec_boolean ("vflip", "Vertical Flip", + "Vertical flip", FALSE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_HFLIP, + g_param_spec_boolean ("hflip", "Horisontal Flip", + "Horisontal flip", FALSE, + G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE)); + + g_object_class_install_property (gobject_class, + PROP_DISABLE_LOW_RES_CROP, + g_param_spec_boolean ("disable-low-res-crop", "disable low resolution crop", + "disable software crop on unsupported low resolution frame size", FALSE, + G_PARAM_READWRITE)); + + camera_class->is_open = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_is_open); + camera_class->open = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_open); + camera_class->close = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_close); + camera_class->get_attribute = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_attribute); + camera_class->set_attribute = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_attribute); + camera_class->set_capture = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_capture); + camera_class->start = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_capture_start); + camera_class->is_active = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_is_active); + camera_class->grab_frame = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_grab_frame); + camera_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_capture_stop); + camera_class->get_caps = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_caps); + camera_class->get_num_buffers = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_num_buffers); + camera_class->unlock = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_unlock); + camera_class->unlock_stop = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_unlock_stop); + + camera_class->set_capture_mode = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_capture_mode); +// camera_class->set_vfinder_mode = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_set_viewfinder_mode); + 
camera_class->set_autofocus = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_autofocus); + camera_class->set_autoexposure = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_autoexposure); + camera_class->read_settings = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_read_settings); + camera_class->write_settings = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_write_settings); + camera_class->get_capabilities = + GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_capabilities); + camera_class->set_zoom = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_zoom); + camera_class->set_AeAafwindow = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_AeAafwindow); + camera_class->set_ae_mode = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_ae_mode); + camera_class->get_ae_mode = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_ae_mode); + + camera_class->get_makernote = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_get_makernote); + camera_class->makernote_init = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_makernote_init); + camera_class->makernote_deal = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_makernote_deal); + camera_class->makernote_uninit = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_makernote_uninit); + camera_class->set_flash_mode = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_set_flash_mode); + camera_class->read_exif = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_read_exif); + + camera_class->set_strobe_state = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_set_strobe_state); + +//camera_class->fill_image_tags = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_fill_image_tags); +//camera_class->get_preview_image = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_get_preview_image); +} + +/* + * gst_v4l2camsrc_driver_wrapper_load: + * Create the mfldadvci object. The function in libmfldadvci can + * be called from this source element now + */ +GstV4l2MFLDAdvCI * +gst_v4l2camsrc_mfldadvci_wrapper_load (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + GstV4l2MFLDAdvCI *mfldadvci; + + mfldadvci = g_new0 (GstV4l2MFLDAdvCI, 1); + wrapper_default_link_functions_init(mfldadvci); + +#ifdef USE_DYNAMIC_3A + if (g_module_supported ()) { + gchar *module_file; + module_file = g_module_build_path (MFLD_ADVCI_PATH, "libmfldadvci.so.0"); + GST_DEBUG_OBJECT (v4l2camsrc, "Loading %s", module_file); + + v4l2camsrc->module = g_module_open (module_file, G_MODULE_BIND_LOCAL); + + /* Open again if libmfldadvci.so.0 doesn't exist */ + if (!v4l2camsrc->module) { + module_file = g_module_build_path (MFLD_ADVCI_PATH, "libmfldadvci.so"); + v4l2camsrc->module = g_module_open (module_file, G_MODULE_BIND_LOCAL); + GST_DEBUG_OBJECT (v4l2camsrc, "Loading %s", module_file); + } + GST_DEBUG_OBJECT(v4l2camsrc, "Camera Source Interface version is %d\n", LIBMFLDADVCI_VERSION); + if (v4l2camsrc->module) { + lib_3a_link_functions_init(mfldadvci, v4l2camsrc->module); + } + } +#endif /* USE_DYNAMIC_3A */ + + return mfldadvci; +} + +/* + * gst_v4l2camsrc_mfldadvci_wrapper_unload: + * Unload the libmfldadvci and free its resource + */ +void +gst_v4l2camsrc_mfldadvci_wrapper_unload (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + GstV4l2MFLDAdvCI *mfldadvci = v4l2camsrc->mfldadvci; + g_module_close (v4l2camsrc->module); + v4l2camsrc->module = NULL; + + g_free (mfldadvci); + +} + + + +/* + * gst_v4l2camsrc_init: + * @v4l2camsrc: #GstMFLDV4l2CamSrc. + * @klass: #GstMFLDV4l2CamSrcClass. 
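+ *
+ * Instance initializer: fills in the default property values, creates the
+ * device mutex, loads the mfldadvci wrapper via
+ * gst_v4l2camsrc_mfldadvci_wrapper_load() and calls libmfld_cam_init().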
+ * + */ +static void +gst_v4l2camsrc_init (GstMFLDV4l2CamSrc * v4l2camsrc, + GstMFLDV4l2CamSrcClass * klass) +{ + v4l2camsrc->num_buffers = GST_V4L2CAMSRC_DEFAULT_BUFFERS; + v4l2camsrc->tmp_num_buffers = v4l2camsrc->num_buffers; + v4l2camsrc->videodev = g_strdup (DEFAULT_PROP_DEVICE); + v4l2camsrc->video_fd = DEFAULT_PROP_DEVICE_FD; + v4l2camsrc->poll = gst_poll_new (TRUE); + v4l2camsrc->buffer = NULL; + v4l2camsrc->crop_supported = FALSE; + v4l2camsrc->max_zoom_factor = MAX_RESIZER_FACTOR; + v4l2camsrc->zoom_factor = DEFAULT_RESIZER_FACTOR; + v4l2camsrc->use_mmap = TRUE; + v4l2camsrc->use_copy = FALSE; + v4l2camsrc->capture_mode = GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER; + /* MFLD camera Advanced features status */ + v4l2camsrc->gdc_enabled = FALSE; + v4l2camsrc->rer_enabled = FALSE; + v4l2camsrc->cac_enabled = FALSE; + v4l2camsrc->dvs_enabled = FALSE; + v4l2camsrc->dis_enabled = FALSE; + v4l2camsrc->ee_enabled = TRUE; + v4l2camsrc->sc_enabled = TRUE; + v4l2camsrc->cc_updated = FALSE; + v4l2camsrc->gamma_updated = FALSE; + v4l2camsrc->ae_enabled = TRUE; + v4l2camsrc->af_enabled = TRUE; + v4l2camsrc->awb_enabled = TRUE; + v4l2camsrc->still_af = FALSE; + v4l2camsrc->bayer_downscaling = FALSE; + v4l2camsrc->tone.gamma = 2.2; + v4l2camsrc->tone.brightness = 0; + v4l2camsrc->tone.contrast = 256; + + v4l2camsrc->preflash_enabled = FALSE; + v4l2camsrc->capflash_enabled = FALSE; + v4l2camsrc->preflash_analoggain = 0; + + v4l2camsrc->dump_raw = FALSE; + v4l2camsrc->dump_image = FALSE; + v4l2camsrc->raw_output_size = 0; + v4l2camsrc->debug_flags = DEFAULT_DEBUG_FLAGS; + v4l2camsrc->disable_low_res_crop = FALSE; + + v4l2camsrc->device_mutex = g_mutex_new (); + + v4l2camsrc->mfldadvci = gst_v4l2camsrc_mfldadvci_wrapper_load (v4l2camsrc); + + v4l2camsrc->input_sensor = GST_CAMERA_INPUT_SENSOR_PRIMARY; + v4l2camsrc->ae_metering_mode = GST_CAMERA_AE_METERING_AUTO; + v4l2camsrc->ae_mode = GST_CAMERA_AE_MODE_AUTO; + v4l2camsrc->af_metering_mode = GST_CAMERA_AF_METERING_AUTO; + + v4l2camsrc->ae_window.x_left = 0; + v4l2camsrc->ae_window.x_right = 0; + v4l2camsrc->ae_window.y_bottom = 0; + v4l2camsrc->ae_window.y_top = 0; + v4l2camsrc->ae_window.weight= 0x8000; + v4l2camsrc->af_window.x_left = 0; + v4l2camsrc->af_window.x_right = 0; + v4l2camsrc->af_window.y_bottom = 0; + v4l2camsrc->af_window.y_top = 0; + v4l2camsrc->af_window.weight= 0x8000; + + libmfld_cam_init (v4l2camsrc->mfldadvci); + + GST_DEBUG ("initialized to commit %s", MFLD_V4L2CAMSRC_VERSION); +} + + +static void +gst_v4l2camsrc_dispose (GObject * object) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (object); + + if (v4l2camsrc->formats) { + gst_v4l2camsrc_clear_format_list (v4l2camsrc); + } + + if (v4l2camsrc->probed_caps) { + gst_caps_unref (v4l2camsrc->probed_caps); + v4l2camsrc->probed_caps = NULL; + } + + /* FIXME: driver cleanup function */ + if (v4l2camsrc->videodev) { + g_free (v4l2camsrc->videodev); + v4l2camsrc->videodev = NULL; + } + + if (v4l2camsrc->poll) { + gst_poll_free (v4l2camsrc->poll); + } + + g_mutex_free (v4l2camsrc->device_mutex); + + G_OBJECT_CLASS (parent_class)->dispose (object); + libmfld_cam_dispose (); +} + + +/* + * gst_v4l2camsrc_finalize: + * @object: + * + */ +static void +gst_v4l2camsrc_finalize (GObject * object) +{ + G_OBJECT_CLASS (parent_class)->finalize (object); +} + + + +/* + */ +static void +gst_v4l2camsrc_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (object); + gboolean opened = 
GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc); + + switch (prop_id) { + case PROP_DEVICE: + g_free (v4l2camsrc->videodev); + v4l2camsrc->videodev = g_value_dup_string (value); + break; + case PROP_QUEUE_SIZE: + v4l2camsrc->num_buffers = g_value_get_uint (value); + v4l2camsrc->tmp_num_buffers = v4l2camsrc->num_buffers; + break; + case PROP_INPUT_SENSOR: + v4l2camsrc->input_sensor = g_value_get_enum (value); + break; + case PROP_USE_MMAP: + v4l2camsrc->use_mmap = g_value_get_boolean (value); + break; + case PROP_USE_COPY: + v4l2camsrc->use_copy = g_value_get_boolean (value); + break; + case PROP_AE: + v4l2camsrc->ae_enabled = g_value_get_boolean (value); + break; + case PROP_AE_WINDOW: + { + GstStructure *ae_window_param = NULL; + char * ctmp = NULL; + gboolean parsed = TRUE; + + ctmp = g_malloc0 (60); + if (ctmp == NULL) + { + GST_DEBUG_OBJECT(v4l2camsrc, "alloc string mem failed.\n"); + break; + } + strncpy (ctmp,"ae,",3); + strncat (ctmp,g_value_get_string (value),55); + ae_window_param = gst_structure_from_string (ctmp, NULL); + if(ae_window_param == NULL) + { + GST_DEBUG_OBJECT(v4l2camsrc,"wrong string format is entered. stop setting ae window.\n"); + g_free (ctmp); + break; + } + + parsed = gst_structure_get_int(ae_window_param, "x_left", + &v4l2camsrc->ae_window.x_left); + parsed |= gst_structure_get_int(ae_window_param, "x_right", + &v4l2camsrc->ae_window.x_right); + parsed |= gst_structure_get_int(ae_window_param, "y_top", + &v4l2camsrc->ae_window.y_top); + parsed |= gst_structure_get_int(ae_window_param, "y_bottom", + &v4l2camsrc->ae_window.y_bottom); + + if (parsed == FALSE) + { + GST_DEBUG_OBJECT(v4l2camsrc,"cannot parse ae window parameter. \n"); + gst_structure_free (ae_window_param); + g_free (ctmp); + break; + } + + parsed = gst_structure_get_int(ae_window_param, "weight", + &v4l2camsrc->ae_window.weight); + + if (parsed == FALSE) + { + GST_DEBUG_OBJECT(v4l2camsrc, + "set ae window weight to default value 0x8000.\n"); + v4l2camsrc->ae_window.weight = 0x8000; + } + + GST_DEBUG_OBJECT(v4l2camsrc,"ae-window-setting: x_left:%d, x_right:%d," + "y_bottom:%d, y_top:%d, weight:%d.\n", + v4l2camsrc->ae_window.x_left,v4l2camsrc->ae_window.x_right, + v4l2camsrc->ae_window.y_bottom, v4l2camsrc->ae_window.y_top, + v4l2camsrc->ae_window.weight); + + + if (v4l2camsrc->ae_metering_mode != GST_CAMERA_AE_METERING_SPOT) + { + GST_DEBUG_OBJECT(v4l2camsrc,"wrong ae metering mode. 
set it to spot mode automaticly.\n"); + v4l2camsrc->ae_metering_mode = GST_CAMERA_AE_METERING_SPOT; + v4l2camsrc->mfldadvci->AeSetMeteringMode + ( (advci_ae_metering_mode) v4l2camsrc->ae_metering_mode); + + } + v4l2camsrc->mfldadvci->AeSetWindow ((advci_window *) (&(v4l2camsrc->ae_window))); + + gst_structure_free (ae_window_param); + g_free (ctmp); + break; + } + + case PROP_AE_METERING_MODE: + { + v4l2camsrc->ae_metering_mode = g_value_get_enum (value); + v4l2camsrc->mfldadvci->AeSetMeteringMode((advci_ae_metering_mode)v4l2camsrc->ae_metering_mode); + break; + } + case PROP_AE_MODE: + { + v4l2camsrc->ae_mode = g_value_get_enum (value); + v4l2camsrc->mfldadvci->AeSetMode((advci_ae_mode)v4l2camsrc->ae_mode); + break; + } + case PROP_AF_METERING_MODE: + { + v4l2camsrc->af_metering_mode = g_value_get_enum (value); + v4l2camsrc->mfldadvci->AfSetMeteringMode((advci_af_metering_mode)v4l2camsrc->af_metering_mode); + break; + } + case PROP_AF_WINDOW: + { + GstStructure *af_window_param = NULL; + char * ctmp = NULL; + gboolean parsed = TRUE; + + ctmp = g_malloc0 (60); + if (ctmp == NULL) + { + GST_DEBUG_OBJECT(v4l2camsrc, "alloc string mem failed.\n"); + break; + } + strncpy (ctmp,"af,",3); + strncat (ctmp,g_value_get_string (value),55); + af_window_param = gst_structure_from_string (ctmp, NULL); + if(af_window_param == NULL) + { + GST_DEBUG_OBJECT(v4l2camsrc,"wrong string format is entered. stop setting af window.\n"); + g_free (ctmp); + break; + } + + parsed = gst_structure_get_int(af_window_param, "x_left", + &v4l2camsrc->af_window.x_left); + parsed |= gst_structure_get_int(af_window_param, "x_right", + &v4l2camsrc->af_window.x_right); + parsed |= gst_structure_get_int(af_window_param, "y_top", + &v4l2camsrc->af_window.y_top); + parsed |= gst_structure_get_int(af_window_param, "y_bottom", + &v4l2camsrc->af_window.y_bottom); + + if (parsed == FALSE) + { + GST_DEBUG_OBJECT(v4l2camsrc,"cannot parse af window parameter. 
\n"); + gst_structure_free (af_window_param); + g_free (ctmp); + break; + } + + parsed = gst_structure_get_int(af_window_param, "weight", + &v4l2camsrc->af_window.weight); + + if (parsed == FALSE) + { + GST_DEBUG_OBJECT(v4l2camsrc, + "set af window weight to default value 0x8000.\n"); + v4l2camsrc->af_window.weight = 0x8000; + } + + GST_DEBUG_OBJECT(v4l2camsrc,"af-window-setting: x_left:%d, x_right:%d," + "y_bottom:%d, y_top:%d, weight:%d.\n", + v4l2camsrc->af_window.x_left,v4l2camsrc->af_window.x_right, + v4l2camsrc->af_window.y_bottom, v4l2camsrc->af_window.y_top, + v4l2camsrc->af_window.y_top); + + + v4l2camsrc->mfldadvci->AfSetWindows (1, (advci_window *) (&(v4l2camsrc->af_window))); + + gst_structure_free (af_window_param); + g_free (ctmp); + break; + } + + case PROP_AF: + v4l2camsrc->af_enabled = g_value_get_boolean (value); + break; + + case PROP_AWB: + v4l2camsrc->awb_enabled = g_value_get_boolean (value); + break; + case PROP_STILL_AF: + v4l2camsrc->still_af = g_value_get_boolean (value); + cam_set_autofocus (v4l2camsrc->still_af); + break; + case PROP_FOCUS_POSITION: + v4l2camsrc->focus_posi = g_value_get_int(value); + if (opened) + cam_driver_set_focus_posi (v4l2camsrc->video_fd, v4l2camsrc->focus_posi); + else + v4l2camsrc->focus_updated = TRUE; + break; + case PROP_VFLIP: + v4l2camsrc->vflip = g_value_get_boolean(value); + if (gst_v4l2camsrc_is_open (GST_CAMERA_SRC (v4l2camsrc))) + gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_VFLIP,v4l2camsrc->vflip); + break; + case PROP_HFLIP: + v4l2camsrc->hflip = g_value_get_boolean(value); + if (gst_v4l2camsrc_is_open (GST_CAMERA_SRC (v4l2camsrc))) + gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_HFLIP,v4l2camsrc->hflip); + break; + case PROP_BAYER_DOWNSCALING: + v4l2camsrc->bayer_downscaling = g_value_get_boolean (value); + break; + case PROP_CAPTURE_CORRECTION_GDC: + v4l2camsrc->gdc_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_GDC, v4l2camsrc->gdc_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_CAC: + v4l2camsrc->cac_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_CAC, v4l2camsrc->cac_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_RER: + v4l2camsrc->rer_enabled = g_value_get_boolean (value); + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_RER, v4l2camsrc->rer_enabled); + break; + case PROP_CAPTURE_CORRECTION_DIS: + v4l2camsrc->dis_enabled = g_value_get_boolean (value); + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_DIS, v4l2camsrc->dis_enabled); + break; + case PROP_CAPTURE_CORRECTION_DVS: + v4l2camsrc->dvs_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_DVS, v4l2camsrc->dvs_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_EDGE_ENHANCEMENT: + v4l2camsrc->ee_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_EE, v4l2camsrc->ee_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_SHADING_CORRECTION: + v4l2camsrc->sc_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_SC, 
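+          /* Note that the RER and DIS cases above push the setting to the
+           * driver unconditionally, while every other correction toggle in
+           * this switch defers the write by setting cc_updated when the
+           * device is not open yet. */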
v4l2camsrc->sc_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_BLACK_LEVEL_COMPENSATION: + v4l2camsrc->blc_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_BLC, v4l2camsrc->blc_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_BAD_PIXEL_DETECTION: + v4l2camsrc->bpd_enabled = g_value_get_boolean (value); + if (opened) + cam_set_capture_correction (v4l2camsrc->video_fd, + CAM_CAPTURE_CORRECTION_BPD, v4l2camsrc->bpd_enabled); + else + v4l2camsrc->cc_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_GAMMA: + v4l2camsrc->tone.gamma = g_value_get_float (value); + if (opened) + cam_set_tone_control (v4l2camsrc->video_fd, CAM_GAMMA_VALUE, + &v4l2camsrc->tone); + else + v4l2camsrc->gamma_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_CONTRAST: + v4l2camsrc->tone.contrast = g_value_get_int (value); + if (opened) + cam_set_tone_control (v4l2camsrc->video_fd, CAM_CONTRAST_VALUE, + &v4l2camsrc->tone); + else + v4l2camsrc->gamma_updated = TRUE; + break; + case PROP_CAPTURE_CORRECTION_BRIGHTNESS: + v4l2camsrc->tone.brightness = g_value_get_int (value); + if (opened) + cam_set_tone_control (v4l2camsrc->video_fd, CAM_BRIGHTNESS_VALUE, + &v4l2camsrc->tone); + else + v4l2camsrc->gamma_updated = TRUE; + break; + case PROP_DUMP_RAW: + v4l2camsrc->dump_raw = g_value_get_boolean (value); + break; + case PROP_DUMP_IMAGE: + v4l2camsrc->dump_image = g_value_get_boolean (value); + break; + case PROP_DEBUG_FLAGS: + v4l2camsrc->debug_flags = g_value_get_flags (value); + break; + case PROP_DISABLE_LOW_RES_CROP: + v4l2camsrc->disable_low_res_crop = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + + + } +} + +/* + */ +static void +gst_v4l2camsrc_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (object); + + switch (prop_id) { + case PROP_DEVICE: + g_value_set_string (value, v4l2camsrc->videodev); + break; + case PROP_DEVICE_NAME: + { + const guchar *new = NULL; + + if (gst_v4l2camsrc_is_open (GST_CAMERA_SRC (v4l2camsrc))) { + new = v4l2camsrc->vcap.card; + } else if (gst_v4l2camsrc_open (GST_CAMERA_SRC (v4l2camsrc))) { + new = v4l2camsrc->vcap.card; + gst_v4l2camsrc_close (GST_CAMERA_SRC (v4l2camsrc)); + gst_camerasrc_clear_color_channels (GST_CAMERA_SRC (v4l2camsrc)); + } + g_value_set_string (value, (gchar *) new); + break; + } + case PROP_DEVICE_FD: + { + if (gst_v4l2camsrc_is_open (GST_CAMERA_SRC (v4l2camsrc))) + g_value_set_int (value, v4l2camsrc->video_fd); + else + g_value_set_int (value, DEFAULT_PROP_DEVICE_FD); + break; + } + case PROP_QUEUE_SIZE: + g_value_set_uint (value, v4l2camsrc->num_buffers); + break; + case PROP_INPUT_SENSOR: + g_value_set_enum (value, v4l2camsrc->input_sensor); + break; + case PROP_USE_MMAP: + g_value_set_boolean (value, v4l2camsrc->use_mmap); + break; + case PROP_USE_COPY: + g_value_set_boolean (value, v4l2camsrc->use_copy); + break; + case PROP_AE: + g_value_set_boolean (value, v4l2camsrc->ae_enabled); + break; + case PROP_AE_METERING_MODE: + g_value_set_enum (value, v4l2camsrc->ae_metering_mode); + break; + case PROP_AE_MODE: + g_value_set_enum (value, v4l2camsrc->ae_mode); + break; + case PROP_AE_WINDOW: + { + GstStructure *tmp = NULL; + tmp = gst_structure_empty_new("ae-window"); + if(tmp == NULL) + { + GST_DEBUG ("wrong default ae window 
setting.\n"); + break; + } + gst_structure_set (tmp,"x_left", G_TYPE_INT, + v4l2camsrc->ae_window.x_left,NULL); + gst_structure_set (tmp,"x_right", G_TYPE_INT, + v4l2camsrc->ae_window.x_right,NULL); + gst_structure_set (tmp,"y_bottom", G_TYPE_INT, + v4l2camsrc->ae_window.y_bottom,NULL); + gst_structure_set (tmp,"y_top", G_TYPE_INT, + v4l2camsrc->ae_window.y_top,NULL); + g_value_set_string(value, gst_structure_to_string(tmp)); + gst_structure_free(tmp); + break; + } + + case PROP_AF: + g_value_set_boolean (value, v4l2camsrc->af_enabled); + break; + case PROP_AF_METERING_MODE: + g_value_set_enum (value, v4l2camsrc->af_metering_mode); + break; + case PROP_AF_WINDOW: + { + GstStructure *tmp = NULL; + tmp = gst_structure_empty_new("af-window"); + if(tmp == NULL) + { + GST_DEBUG ("wrong default af window setting.\n"); + break; + } + gst_structure_set (tmp,"x_left", G_TYPE_INT, + v4l2camsrc->af_window.x_left,NULL); + gst_structure_set (tmp,"x_right", G_TYPE_INT, + v4l2camsrc->af_window.x_right,NULL); + gst_structure_set (tmp,"y_bottom", G_TYPE_INT, + v4l2camsrc->af_window.y_bottom,NULL); + gst_structure_set (tmp,"y_top", G_TYPE_INT, + v4l2camsrc->af_window.y_top,NULL); + g_value_set_string(value, gst_structure_to_string(tmp)); + gst_structure_free(tmp); + break; + } + + case PROP_AWB: + g_value_set_boolean (value, v4l2camsrc->awb_enabled); + break; + case PROP_STILL_AF: + g_value_set_boolean (value, v4l2camsrc->still_af); + break; + case PROP_FOCUS_POSITION: + g_value_set_int (value, v4l2camsrc->focus_posi); + break; + case PROP_VFLIP: + g_value_set_boolean (value, v4l2camsrc->vflip); + break; + case PROP_HFLIP: + g_value_set_boolean (value, v4l2camsrc->hflip); + break; + case PROP_BAYER_DOWNSCALING: + g_value_set_boolean (value, v4l2camsrc->bayer_downscaling); + break; + case PROP_CAPTURE_CORRECTION_GDC: + g_value_set_boolean (value, v4l2camsrc->gdc_enabled); + break; + case PROP_CAPTURE_CORRECTION_CAC: + g_value_set_boolean (value, v4l2camsrc->cac_enabled); + break; + case PROP_CAPTURE_CORRECTION_RER: + g_value_set_boolean (value, v4l2camsrc->rer_enabled); + break; + case PROP_CAPTURE_CORRECTION_DIS: + g_value_set_boolean (value, v4l2camsrc->dis_enabled); + break; + case PROP_CAPTURE_CORRECTION_DVS: + g_value_set_boolean (value, v4l2camsrc->dvs_enabled); + break; + case PROP_CAPTURE_CORRECTION_EDGE_ENHANCEMENT: + g_value_set_boolean (value, v4l2camsrc->ee_enabled); + break; + case PROP_CAPTURE_CORRECTION_SHADING_CORRECTION: + g_value_set_boolean (value, v4l2camsrc->sc_enabled); + break; + case PROP_CAPTURE_CORRECTION_BLACK_LEVEL_COMPENSATION: + g_value_set_boolean (value, v4l2camsrc->blc_enabled); + break; + case PROP_CAPTURE_CORRECTION_BAD_PIXEL_DETECTION: + g_value_set_boolean (value, v4l2camsrc->bpd_enabled); + break; + case PROP_CAPTURE_CORRECTION_GAMMA: + g_value_set_float (value, v4l2camsrc->tone.gamma); + break; + case PROP_CAPTURE_CORRECTION_CONTRAST: + g_value_set_int (value, v4l2camsrc->tone.contrast); + break; + case PROP_CAPTURE_CORRECTION_BRIGHTNESS: + g_value_set_int (value, v4l2camsrc->tone.brightness); + break; + case PROP_DUMP_RAW: + g_value_set_boolean (value, v4l2camsrc->dump_raw); + break; + case PROP_DUMP_IMAGE: + g_value_set_boolean (value, v4l2camsrc->dump_image); + break; + case PROP_DEBUG_FLAGS: + g_value_set_flags (value, v4l2camsrc->debug_flags); + break; + case PROP_DISABLE_LOW_RES_CROP: + g_value_set_boolean (value, v4l2camsrc->disable_low_res_crop); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + + + +static 
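+/*
+ * gst_v4l2camsrc_unlock / gst_v4l2camsrc_unlock_stop:
+ *
+ * Called to interrupt (and later resume) any blocking wait on the capture
+ * device, e.g. when flushing or shutting down. Marking the GstPoll as
+ * flushing makes a pending gst_poll_wait() in the frame-grabbing path return
+ * immediately instead of blocking for the next buffer.
+ */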
gboolean +gst_v4l2camsrc_unlock (GstCameraSrc * src) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (src); + + GST_LOG_OBJECT (v4l2camsrc, "Flushing"); + gst_poll_set_flushing (v4l2camsrc->poll, TRUE); + + return TRUE; +} + +static gboolean +gst_v4l2camsrc_unlock_stop (GstCameraSrc * src) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (src); + + GST_LOG_OBJECT (v4l2camsrc, "No longer flushing"); + gst_poll_set_flushing (v4l2camsrc->poll, FALSE); + + return TRUE; +} + +/* + * gst_v4l2camsrc_set_zoom: + * @camsrc: @GstCameraSrc object. + * @zoom: zoom factor to be set. + * + * Set the zoom factor for outputted video. + * + * Returns: TRUE on success. + */ +static gboolean +gst_v4l2camsrc_set_zoom (GstCameraSrc * camsrc, gfloat zoom) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = TRUE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + GST_DEBUG_OBJECT (v4l2camsrc, "ZOOM: %f", zoom); + + v4l2camsrc->zoom_factor = zoom; + if (v4l2camsrc->is_active) { + g_mutex_lock (v4l2camsrc->device_mutex); + ret = gst_v4l2camsrc_libmfldcam_set_zoom (v4l2camsrc, zoom); + g_mutex_unlock (v4l2camsrc->device_mutex); + } else + v4l2camsrc->zoom_updated = TRUE; + + return ret; +} +static gboolean +gst_v4l2camsrc_set_ae_mode(GstCameraSrc * camsrc, int mode) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = TRUE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + if (v4l2camsrc->is_active) { + g_mutex_lock (v4l2camsrc->device_mutex); + v4l2camsrc->ae_metering_mode = *(GstCameraAEMeteringMode *)&mode; + v4l2camsrc->mfldadvci->AeSetMeteringMode((advci_ae_metering_mode)v4l2camsrc->ae_metering_mode); + g_mutex_unlock (v4l2camsrc->device_mutex); + } + + return ret; +} +static gboolean +gst_v4l2camsrc_get_ae_mode(GstCameraSrc * camsrc, int *mode) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = TRUE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + if (v4l2camsrc->is_active) { + g_mutex_lock (v4l2camsrc->device_mutex); + *mode = v4l2camsrc->ae_metering_mode; + g_mutex_unlock (v4l2camsrc->device_mutex); + } + + return ret; +} + +static gboolean +gst_v4l2camsrc_get_makernote (GstCameraSrc * camsrc, unsigned char *buf, unsigned size) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = TRUE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GST_DEBUG_OBJECT (v4l2camsrc, "%s, !!!!!!line:%d\n", __func__, __LINE__); + + if (v4l2camsrc->is_active) { + g_mutex_lock (v4l2camsrc->device_mutex); + ret = gst_v4l2camsrc_libmfldcam_get_makernote(v4l2camsrc, buf, size); + g_mutex_unlock (v4l2camsrc->device_mutex); + } + + return ret; +} + +static gboolean gst_v4l2camsrc_makernote_init(GstCameraSrc * camsrc, + unsigned * buf_size, + unsigned num_afwindows, + unsigned num_faces, + unsigned num_eyes, + unsigned num_grid, + int *handle) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GST_DEBUG_OBJECT (v4l2camsrc, "%s, !!!!!!line:%d\n", __func__, __LINE__); + + num_afwindows = num_grid = 1; + num_faces = num_eyes = 0; + return FALSE; + +} + +static gboolean gst_v4l2camsrc_makernote_deal(GstCameraSrc * camsrc, + GstBuffer *pmakerbuf, + unsigned num_afwindows, + unsigned num_grid, + int handle) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GST_DEBUG_OBJECT (v4l2camsrc, "%s, !!!!!!line:%d\n", __func__, __LINE__); + + return FALSE; + +} + +static gboolean gst_v4l2camsrc_makernote_uninit(GstCameraSrc * camsrc, + int handle) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GST_DEBUG_OBJECT (v4l2camsrc, "%s, !!!!!!line:%d\n", __func__, __LINE__); + 
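+  /* The makernote_init/deal/uninit vfuncs above and this one are stubs: they
+   * only trace the call and return FALSE, so maker note data is expected to
+   * come through gst_v4l2camsrc_get_makernote() and libmfldcam instead. The
+   * parameter assignments in makernote_init() touch local copies only and
+   * have no effect on the caller. */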
+ return FALSE; +} + + +/* + * plugin_init: + * @plugin: GstPlugin + * + * Returns: TRUE on success. + */ +static gboolean +plugin_init (GstPlugin * plugin) +{ + GST_DEBUG_CATEGORY_INIT (gst_v4l2camsrc_debug, "camerasrc", 0, + "Medfield V4L2 camera source"); + + return gst_element_register (plugin, "camerasrc", + GST_RANK_NONE, GST_TYPE_V4L2CAMSRC); +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "mfldv4l2cam", + "V4L2 camera image capturing element", + plugin_init, VERSION, "LGPL", "Intel", "www.intel.com") diff --git a/gst/mfldv4l2cam/gstv4l2camsrc.h b/gst/mfldv4l2cam/gstv4l2camsrc.h new file mode 100644 index 0000000..5108e04 --- /dev/null +++ b/gst/mfldv4l2cam/gstv4l2camsrc.h @@ -0,0 +1,319 @@ +/* GStreamer V4L2 camera source + * Copyright (C) 2010 Nokia Corporation + * 2010 Intel Corporation + * + * Contact: Maemo Multimedia + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_V4L2CAMSRC_H__ +#define __GST_V4L2CAMSRC_H__ + +#include + +#include +#include +#include +#include "mfld_cam.h" +#include "gstv4l2mfldadvci.h" + +G_BEGIN_DECLS + +#define GST_TYPE_V4L2CAMSRC \ +(gst_v4l2camsrc_get_type()) +#define GST_V4L2CAMSRC(obj) \ +(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_V4L2CAMSRC, GstMFLDV4l2CamSrc)) +#define GST_V4L2CAMSRC_CLASS(klass) \ +(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_V4L2CAMSRC, GstMFLDV4l2CamSrcClass)) +#define GST_IS_V4L2CAMSRC(obj) \ +(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_V4L2CAMSRC)) +#define GST_IS_V4L2CAMSRC_CLASS(klass) \ +(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_V4L2CAMSRC)) + +#define MAX_RESIZER_FACTOR 10.0 +#define DEFAULT_RESIZER_FACTOR 1.0 + +#define GST_TYPE_CAMERA_INPUT_SENSOR (gst_camera_input_sensor_get_type()) + +#define GST_TYPE_CAMERA_AE_METERING_MODE (gst_camera_ae_metering_mode_get_type()) +#define GST_TYPE_CAMERA_AE_MODE (gst_camera_ae_mode_get_type()) +#define GST_TYPE_CAMERA_AF_METERING_MODE (gst_camera_af_metering_mode_get_type()) + +typedef struct _GstMFLDV4l2CamSrc GstMFLDV4l2CamSrc; +typedef struct _GstMFLDV4l2CamSrcClass GstMFLDV4l2CamSrcClass; + +typedef struct _GstV4l2Buffer GstV4l2Buffer; +typedef struct _GstMFLDV4l2CamSrcBufferPool GstMFLDV4l2CamSrcBufferPool; +typedef struct _GstVideoMode GstVideoMode; +typedef struct _GstCameraWindow GstCameraWindow; + +/** + * GstCameraInputSensor: + * @GST_CAMERA_INPUT_SENSOR_PRIMARY: Pimary Sensor as ISP input + * @GST_CAMERA_INPUT_SENSOR_SECONDARY: Secondary Sensor as ISP input + * + * Camera element Input sensor + */ +typedef enum { + GST_CAMERA_INPUT_SENSOR_PRIMARY, + GST_CAMERA_INPUT_SENSOR_SECONDARY +} GstCameraInputSensor; + +/** + * GstCameraAEMeteringMode: + * @GST_CAMERA_AE_METERING_AUTO: Auto Exposure auto metering mode + * @GST_CAMERA_AE_METERING_SPOT: Auto Exposure spot metering mode + * @GST_CAMERA_AE_METERING_CENTER: Auto Exposure center 
metering mode + * @GST_CAMERA_AE_METERING_CUSTOMIZED: Auto Exposure customized metering mode + * + * Camera element auto exposure metering mode + */ +typedef enum { + GST_CAMERA_AE_METERING_AUTO, + GST_CAMERA_AE_METERING_SPOT, + GST_CAMERA_AE_METERING_CENTER, + GST_CAMERA_AE_METERING_CUSTOMIZED +} GstCameraAEMeteringMode; + +/** + * GstCameraAEMode: + * @GST_CAMERA_AE_MODE_AUTO: Auto Exposure automatic mode + * @GST_CAMERA_AE_MODE_MANUAL: Auto Exposure manual mode + * @GST_CAMERA_AE_MODE_SHUTTER_PRIORITY: Auto Exposure shutter priority mode + * @GST_CAMERA_AE_MODE_APERTURE_PRIORITY: Auto Exposure aperture priority mode + * + * Camera element auto exposure mode + */ +typedef enum { + GST_CAMERA_AE_MODE_AUTO, + GST_CAMERA_AE_MODE_MANUAL, + GST_CAMERA_AE_MODE_SHUTTER_PRIORITY, + GST_CAMERA_AE_MODE_APERTURE_PRIORITY +} GstCameraAEMode; + +/** + * GstCameraAFMeteringMode: + * @GST_CAMERA_AF_METERING_AUTO: Auto Focus auto metering mode + * @GST_CAMERA_AF_METERING_SPOT: Auto Focus spot metering mode + * @GST_CAMERA_AF_METERING_CENTER: Auto Focus center metering mode + * @GST_CAMERA_AF_METERING_CUSTOMIZED: Auto Focus customized metering mode + * + * Camera element auto exposure metering mode + */ +typedef enum { + GST_CAMERA_AF_METERING_AUTO, + GST_CAMERA_AF_METERING_SPOT, + GST_CAMERA_AF_METERING_CENTER, + GST_CAMERA_AF_METERING_CUSTOMIZED +} GstCameraAFMeteringMode; + +typedef enum { + GST_CAMERASRC_DEBUG_FLAGS_PERFORMANCE = (1 << 0), + GST_CAMERASRC_DEBUG_FLAGS_MAKER_NOTE = (1 << 1), + GST_CAMERASRC_DEBUG_FLAGS_AUTO_FOCUS = (1 << 2) +} GstCameraSrcDebugFlags; +#define GST_TYPE_CAMERASRC_DEBUG_FLAGS (gst_camerasrc_debug_flags_get_type()) +GType gst_camerasrc_debug_flags_get_type (void); + +/** + * GstVideoMode: + * @width: Frame width. + * @height: Frame height. + * @fps_n: Frame rate numerator. + * @fps_d: Frame rate denominator. + * @color_code: Frame color code in subdev format (v4l2_mbus_pixelcode). + * + * Structure containing information about video mode and related buffer pool, + * active buffer and lock protecting it. + */ +struct _GstVideoMode +{ + guint width, height; + guint fps_n, fps_d; + guint32 color_code; +}; + +/** + * GstCameraAEWindow: + * @x_left: x left coordinate. + * @x_right: x right coordinate. + * @y_top: y top coordinate. + * @y_bottom: y bottom coordinate. + * @weight: weight of AE Window + * + * Structure containing information about video mode and related buffer pool, + * active buffer and lock protecting it. + */ + +struct _GstCameraWindow { + gint x_left; + gint x_right; + gint y_top; + gint y_bottom; + gint weight; +}; + +/** + * GstV4l2Buffer: + * @buffer: parent GstBuffer + * @pool: reference to the owning #GstMFLDV4l2CamSrcBufferPool + * @gbuffer: GstBuffer allocated for this buffer for holding the data + * @vbuffer: v4l2_buffer structure + * + * Opaque object. +*/ + +struct _GstV4l2Buffer { + GstBuffer buffer; + GstMFLDV4l2CamSrcBufferPool *pool; + GstBuffer *gbuffer; + void *vbuffer; + gboolean use_mmap; +}; + + +/** + * GstMFLDV4l2CamSrcBufferPool: + * @parent: the parent object + * @lock: pool lock + * @running: TRUE if the pool is being used + * @num_live_buffers: number of buffers being pushed outside element + * @buffer_count: total number of buffers used + * @buffers: buffer table + * @video_fd: video device which produces the buffers + * @queued: lookup table for buffers whether they're queued to driver + * @data_cond: condition being used for waiting a buffer to become available + * + * Opaque #GstMFLDV4l2CamSrcBufferPool object. 
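+ *
+ * The pool owns one #GstV4l2Buffer per driver buffer: buffers are created in
+ * gst_v4l2camsrc_buffer_pool_new(), queued to the driver when the pool is
+ * activated, and re-queued automatically from the buffer finalize hook while
+ * @running is TRUE, so a frame pushed downstream finds its way back to the
+ * capture queue without copying once it is unreffed.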
+ */ +struct _GstMFLDV4l2CamSrcBufferPool +{ + GstMiniObject parent; + + GMutex *lock; + gboolean running; /* with lock */ + gint num_live_buffers; /* number of buffers not with driver */ + guint buffer_count; + GstV4l2Buffer **buffers; + + gint video_fd; /* a dup(2) of the v4l2object's video_fd */ + guint *queued; + GCond* data_cond; + gboolean is_vaapi_sharing; +}; + +/** +* GstMFLDV4l2CamSrc: +* @element: the parent element. +* +* The opaque #GstMFLDV4l2CamSrc data structure. +*/ +struct _GstMFLDV4l2CamSrc { + GstCameraSrc element; + + /*< private >*/ + char *videodev; /* video device file name */ + gint video_fd; /* the video-device's file descriptor */ + gboolean is_open; /* TRUE, when ISP is opened */ + gboolean is_active; /* TRUE, when streaming buffers */ + GstMFLDV4l2CamSrcBufferPool *pool; + guint32 num_buffers; + guint32 tmp_num_buffers; + guint64 offset; + GstPoll * poll; + guint8 **buffer; /* the video buffer (mmap()'ed) */ + GSList *formats; /* list of available capture formats */ + GstCaps *probed_caps; + guint32 frame_byte_size; + GMutex *device_mutex; + gboolean enabled_3a; + gboolean use_copy; + + guint vf_w, vf_h; + guint vf_fps_n, vf_fps_d; + guint32 vf_fourcc; + + guint capture_w, capture_h; + guint32 expected_capture_w, expected_capture_h; + guint capture_fps_n, capture_fps_d; + guint32 capture_fourcc, expected_capture_fourcc; + + struct v4l2_capability vcap; /* the video device's capabilities */ + struct v4l2_cropcap vcrop; /* cropping & scaling capabilities */ + gboolean crop_supported; + gfloat max_zoom_factor; /* Maximum zoom factor */ + gfloat zoom_factor; /* Current zoom factor */ + + /* MFLD camera advanced features */ + gboolean gdc_enabled, cac_enabled, ee_enabled, sc_enabled, dvs_enabled; + gboolean rer_enabled, dis_enabled, ae_enabled, af_enabled, awb_enabled; + gboolean blc_enabled, bpd_enabled; + gboolean caf_enabled; + + gboolean preflash_enabled, capflash_enabled; + guint16 preflash_analoggain; // sensor's, get the analog gain from sensor when pre flash + + gboolean use_mmap; + gboolean still_af; + gboolean bayer_downscaling; + struct tone_control tone; + gboolean cc_updated, gamma_updated, focus_updated; /* Whether the advanced features are updated */ + gboolean zoom_updated; + gint focus_posi; + gboolean vflip,hflip; + GstCameraAEMeteringMode ae_metering_mode; + GstCameraAEMode ae_mode; + GstCameraAFMeteringMode af_metering_mode; + GstCameraWindow ae_window; + GstCameraWindow af_window; + + + /* Support for RAW output */ + gboolean dump_raw; + gchar *raw_output_directory; + guint raw_output_size; + + GstCameraSrcCaptureMode capture_mode; + gboolean initialized; + gboolean running; + GstClockTime ctrl_time; + + GModule *module; + GstV4l2MFLDAdvCI *mfldadvci; + GstCameraInputSensor input_sensor; + gboolean dump_image; + GstCameraSrcDebugFlags debug_flags; + + gboolean disable_low_res_crop; +}; + + +/** +* GstMFLDV4l2CamSrcClass: +* @parent_class: Element parent class. +* +* The opaque GstMFLDV4l2CamSrcClass data structure. 
+*/ +struct _GstMFLDV4l2CamSrcClass { + GstCameraSrcClass parent_class; +}; + +GType gst_v4l2camsrc_get_type (void); + +G_END_DECLS + +#endif /* __GST_V4L2CAMSRC_H__ */ diff --git a/gst/mfldv4l2cam/gstv4l2camvidorient.c b/gst/mfldv4l2cam/gstv4l2camvidorient.c new file mode 100644 index 0000000..f5cc58f --- /dev/null +++ b/gst/mfldv4l2cam/gstv4l2camvidorient.c @@ -0,0 +1,111 @@ +/* GStreamer + * + * Copyright (C) 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * 2010 Intel Corporation + * + * gstv4l2vidorient.c: video orientation interface implementation for V4L2 + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include "gstv4l2camsrc.h" +#include "v4l2camsrc_calls.h" +#include "gstv4l2camvidorient.h" + + + +GST_DEBUG_CATEGORY_STATIC (v4l2camvo_debug); +#define GST_CAT_DEFAULT v4l2camvo_debug + +/* Those are deprecated calls that have been replaced */ +#ifndef V4L2_CID_HCENTER +#define V4L2_CID_HCENTER V4L2_CID_PAN_RESET +#endif +#ifndef V4L2_CID_VCENTER +#define V4L2_CID_VCENTER V4L2_CID_TILT_RESET +#endif + +void +gst_v4l2camsrc_vo_interface_init (GstVideoOrientationInterface * klass) +{ + GST_DEBUG_CATEGORY_INIT (v4l2camvo_debug, "v4l2camvo", 0, + "V4L2 VideoOrientation interface debugging"); +} + + +gboolean +gst_v4l2camsrc_vo_get_hflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean * flip) +{ + return gst_v4l2camsrc_get_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_HFLIP, flip); +} + +gboolean +gst_v4l2camsrc_vo_get_vflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean * flip) +{ + return gst_v4l2camsrc_get_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_VFLIP, flip); +} + +gboolean +gst_v4l2camsrc_vo_get_hcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint * center) +{ + return gst_v4l2camsrc_get_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_HCENTER, center); +} + +gboolean +gst_v4l2camsrc_vo_get_vcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint * center) +{ + return gst_v4l2camsrc_get_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_VCENTER, center); +} + +gboolean +gst_v4l2camsrc_vo_set_hflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean flip) +{ + return gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_HFLIP, flip); +} + +gboolean +gst_v4l2camsrc_vo_set_vflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean flip) +{ + return gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_VFLIP, flip); +} + +gboolean +gst_v4l2camsrc_vo_set_hcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint center) +{ + return gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_HCENTER, center); +} + +gboolean +gst_v4l2camsrc_vo_set_vcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint center) +{ + return gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc), + V4L2_CID_VCENTER, center); +} diff --git 
a/gst/mfldv4l2cam/gstv4l2camvidorient.h b/gst/mfldv4l2cam/gstv4l2camvidorient.h new file mode 100644 index 0000000..e5d955a --- /dev/null +++ b/gst/mfldv4l2cam/gstv4l2camvidorient.h @@ -0,0 +1,120 @@ +/* GStreamer + * + * Copyright (C) 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * 2010 Intel Corporation + * + * gstv4l2vidorient.h: video orientation interface implementation for V4L2 + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_V4L2CAMSRC_VIDORIENT_H__ +#define __GST_V4L2CAMSRC_VIDORIENT_H__ + +#include +#include + +#include "gstv4l2camsrc.h" + + +G_BEGIN_DECLS + +void gst_v4l2camsrc_vo_interface_init (GstVideoOrientationInterface * klass); + +gboolean gst_v4l2camsrc_vo_get_hflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean *flip); +gboolean gst_v4l2camsrc_vo_get_vflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean *flip); +gboolean gst_v4l2camsrc_vo_get_hcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint *center); +gboolean gst_v4l2camsrc_vo_get_vcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint *center); + +gboolean gst_v4l2camsrc_vo_set_hflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean flip); +gboolean gst_v4l2camsrc_vo_set_vflip (GstMFLDV4l2CamSrc * v4l2camsrc, gboolean flip); +gboolean gst_v4l2camsrc_vo_set_hcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint center); +gboolean gst_v4l2camsrc_vo_set_vcenter (GstMFLDV4l2CamSrc * v4l2camsrc, gint center); + +#define GST_IMPLEMENT_V4L2CAMSRC_VIDORIENT_METHODS(Type, interface_as_function) \ + \ + static gboolean \ + interface_as_function ## _video_orientation_get_hflip (GstVideoOrientation *vo, gboolean *flip) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_get_hflip (this, flip); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_get_vflip (GstVideoOrientation *vo, gboolean *flip) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_get_vflip (this, flip); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_get_hcenter (GstVideoOrientation *vo, gint *center) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_get_hcenter (this, center); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_get_vcenter (GstVideoOrientation *vo, gint *center) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_get_vcenter (this, center); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_set_hflip (GstVideoOrientation *vo, gboolean flip) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_set_hflip (this, flip); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_set_vflip (GstVideoOrientation *vo, gboolean flip) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_set_vflip (this, flip); \ + } \ + \ + static gboolean \ + interface_as_function ## 
_video_orientation_set_hcenter (GstVideoOrientation *vo, gint center) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_set_hcenter (this, center); \ + } \ + \ + static gboolean \ + interface_as_function ## _video_orientation_set_vcenter (GstVideoOrientation *vo, gint center) \ + { \ + Type *this = (Type*) vo; \ + return gst_v4l2camsrc_vo_set_vcenter (this, center); \ + } \ + \ + void \ + interface_as_function ## _video_orientation_interface_init (GstVideoOrientationInterface * klass) \ + { \ + /* default virtual functions */ \ + klass->get_hflip = interface_as_function ## _video_orientation_get_hflip; \ + klass->get_vflip = interface_as_function ## _video_orientation_get_vflip; \ + klass->get_hcenter = interface_as_function ## _video_orientation_get_hcenter; \ + klass->get_vcenter = interface_as_function ## _video_orientation_get_vcenter; \ + klass->set_hflip = interface_as_function ## _video_orientation_set_hflip; \ + klass->set_vflip = interface_as_function ## _video_orientation_set_vflip; \ + klass->set_hcenter = interface_as_function ## _video_orientation_set_hcenter; \ + klass->set_vcenter = interface_as_function ## _video_orientation_set_vcenter; \ + } + +#endif /* __GST_V4L2CAMSRC_VIDORIENT_H__ */ diff --git a/gst/mfldv4l2cam/v4l2camsrc_calls.c b/gst/mfldv4l2cam/v4l2camsrc_calls.c new file mode 100644 index 0000000..55fe886 --- /dev/null +++ b/gst/mfldv4l2cam/v4l2camsrc_calls.c @@ -0,0 +1,3791 @@ +/* GStreamer + * + * Copyright (C) 2002 Ronald Bultje + * 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * 2010 Intel Corporation + * + * v4l2camsrc.c - system calls + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#ifdef __sun +/* Needed on older Solaris Nevada builds (72 at least) */ +#include +#include +#endif + +#include +#include + +#include "v4l2camsrc_calls.h" +#include +#include +#include +GST_DEBUG_CATEGORY_EXTERN (gst_v4l2camsrc_debug); +#define GST_CAT_DEFAULT gst_v4l2camsrc_debug + + +static const gint gst_v4l2camsrc_capture_map[] = { + CAM_VIEWFINDER_MODE_VIEWFINDER, + CAM_VIEWFINDER_MODE_STILL_CAPTURE, + CAM_VIEWFINDER_MODE_VIDEO_RECORD, + -1 +}; + +static const gint gst_v4l2camsrc_effect_map[] = { + CAM_GENERAL_EFFECT_TYPE_NORMAL, + CAM_GENERAL_EFFECT_TYPE_SEPIA, + CAM_GENERAL_EFFECT_TYPE_NEGATIVE, + CAM_GENERAL_EFFECT_TYPE_GRAYSCALE, + CAM_GENERAL_EFFECT_TYPE_NORMAL, + CAM_GENERAL_EFFECT_TYPE_VIVID, + CAM_GENERAL_EFFECT_TYPE_NORMAL, + CAM_GENERAL_EFFECT_TYPE_NORMAL, + CAM_GENERAL_EFFECT_TYPE_NORMAL, + CAM_GENERAL_EFFECT_TYPE_SKY_BLUE, + CAM_GENERAL_EFFECT_TYPE_GRASS_GREEN, + CAM_GENERAL_EFFECT_TYPE_SKIN_WHITEN, + -1 +}; + +static const gint gst_v4l2camsrc_wb_map[] = { + CAM_AWB_MODE_AUTO, + CAM_AWB_MODE_DAYLIGHT, + CAM_AWB_MODE_SUNSET, + CAM_AWB_MODE_CLOUDY, + CAM_AWB_MODE_TUNGSTEN, + CAM_AWB_MODE_FLUORESCENT, + -1 +}; + + +static gint find_item (const gint table[], const gint item); +/* Define this to use memory locking for video buffers */ +/* #define USE_MLOCK */ + +/* lalala... */ +#define GST_V4L2CAMSRC_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1) +#define GST_V4L2CAMSRC_SET_INACTIVE(element) (element)->buffer = NULL + +/* On some systems MAP_FAILED seems to be missing */ +#ifndef MAP_FAILED +#define MAP_FAILED ((caddr_t) -1) +#endif + +#define RESIZER_MAX_DOWNSCALE_FACTOR 4 +#define V4L2CAMSRC_POLL_TIMEOUT (20 * GST_SECOND) + +#define GST_TYPE_V4L2CAMSRC_BUFFER (gst_v4l2camsrc_buffer_get_type()) +#define GST_IS_V4L2CAMSRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2CAMSRC_BUFFER)) +#define GST_V4L2CAMSRC_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2CAMSRC_BUFFER, GstV4l2Buffer)) + +/* Local functions */ +static gboolean +gst_v4l2camsrc_get_nearest_size (GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 pixelformat, gint * width, gint * height); +static void gst_v4l2camsrc_buffer_pool_destroy (GstMFLDV4l2CamSrcBufferPool * + pool, GstMFLDV4l2CamSrc * v4l2camsrc); +static gboolean gst_v4l2camsrc_update_cropping (GstMFLDV4l2CamSrc * v4l2camsrc, + gint width, gint height, gfloat zoom); +static gboolean +gst_v4l2camsrc_check_focus_status (GstMFLDV4l2CamSrc * v4l2camsrc, + GstCameraFocusStatus *fs, gboolean detailed); + +static gboolean gst_v4l2camsrc_libmfldcam_init (GstMFLDV4l2CamSrc * v4l2camsrc); +static gboolean gst_v4l2camsrc_libmfldcam_deinit (GstMFLDV4l2CamSrc * + v4l2camsrc); + +/* + * GstCameraBuffer: + */ + +static GstBufferClass *v4l2buffer_parent_class = NULL; + +static void +gst_v4l2camsrc_buffer_finalize (GstV4l2Buffer * buffer) +{ + GstMFLDV4l2CamSrcBufferPool *pool; + gboolean resuscitated = FALSE; + struct v4l2_buffer *vbuffer; + gint index; + + pool = buffer->pool; + vbuffer = (struct v4l2_buffer *) buffer->vbuffer; + index = vbuffer->index; + + GST_LOG ("finalizing buffer %p %d", buffer, index); + + g_mutex_lock (pool->lock); + if (GST_BUFFER_SIZE (buffer) != 0) + /* BUFFER_SIZE is only set if the frame was dequeued */ + pool->num_live_buffers--; + + if (pool->running) { + if (pool->is_vaapi_sharing && buffer->gbuffer) + GST_BUFFER_DATA(buffer) = 
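+        /* restore the data pointer of the wrapped downstream buffer before
+         * handing the frame back to the driver; while the pool is running a
+         * finalized buffer is not freed but re-queued with VIDIOC_QBUF and
+         * revived with a fresh ref, which keeps the capture queue full
+         * without re-allocating or copying */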
GST_BUFFER_DATA(buffer->gbuffer); + if (ioctl (pool->video_fd, VIDIOC_QBUF, vbuffer) < 0) { + GST_WARNING ("could not requeue buffer %p %d", buffer, index); + } else { + /* FIXME: check that the caps didn't change */ + GST_LOG ("reviving buffer %p, %d", buffer, index); + gst_buffer_ref (GST_BUFFER (buffer)); + GST_BUFFER_SIZE (buffer) = 0; + pool->buffers[index] = buffer; + pool->queued[index] = 1; + g_cond_signal (pool->data_cond); + resuscitated = TRUE; + } + } else { + GST_LOG ("the pool is shutting down"); + } + g_mutex_unlock (pool->lock); + + if (!resuscitated) { + GST_LOG ("buffer %p not recovered, unmapping", buffer); + if (buffer->use_mmap) + munmap ((void *) GST_BUFFER_DATA (buffer), vbuffer->length); + gst_mini_object_unref (GST_MINI_OBJECT (pool)); + + if (!buffer->use_mmap) { + if (buffer->gbuffer) { + /* It was allocated with gst_pad_alloc_buffer */ + /* FIXME temporal fix for double free error */ + if (pool->is_vaapi_sharing) + gst_buffer_unref (buffer->gbuffer); + buffer->gbuffer = NULL; + } else { + /* It was allocated with posix_memalign */ + free (GST_BUFFER_DATA (buffer)); + } + } + + GST_BUFFER_DATA (buffer) = NULL; + + g_free (buffer->vbuffer); + + GST_LOG ("free v4l2buffer"); + GST_MINI_OBJECT_CLASS (v4l2buffer_parent_class)->finalize (GST_MINI_OBJECT + (buffer)); + } +} + +static void +gst_v4l2camsrc_buffer_class_init (gpointer g_class, gpointer class_data) +{ + GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); + + v4l2buffer_parent_class = g_type_class_peek_parent (g_class); + + mini_object_class->finalize = + (GstMiniObjectFinalizeFunction) gst_v4l2camsrc_buffer_finalize; +} + +static GType +gst_v4l2camsrc_buffer_get_type (void) +{ + static GType _gst_v4l2camsrc_buffer_type; + + if (G_UNLIKELY (_gst_v4l2camsrc_buffer_type == 0)) { + static const GTypeInfo v4l2camsrc_buffer_info = { + sizeof (GstBufferClass), + NULL, + NULL, + gst_v4l2camsrc_buffer_class_init, + NULL, + NULL, + sizeof (GstV4l2Buffer), + 0, + NULL, + NULL + }; + _gst_v4l2camsrc_buffer_type = g_type_register_static (GST_TYPE_BUFFER, + "GstCameraBuffer", &v4l2camsrc_buffer_info, 0); + } + return _gst_v4l2camsrc_buffer_type; +} + +static GstV4l2Buffer * +gst_v4l2camsrc_buffer_new (GstMFLDV4l2CamSrcBufferPool * pool, + GstMFLDV4l2CamSrc * v4l2camsrc, guint index, GstCaps * caps) +{ + GstV4l2Buffer *ret = NULL; + GstFlowReturn flow_ret; + struct v4l2_buffer *vbuffer; + + ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2CAMSRC_BUFFER); + ret->use_mmap = v4l2camsrc->use_mmap; + vbuffer = ret->vbuffer = g_new0 (struct v4l2_buffer, 1); + GST_LOG ("creating buffer %u, %p in pool %p", index, ret, pool); + ret->pool = + (GstMFLDV4l2CamSrcBufferPool *) + gst_mini_object_ref (GST_MINI_OBJECT (pool)); + + vbuffer->index = index; + vbuffer->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (v4l2camsrc->use_mmap) + vbuffer->memory = V4L2_MEMORY_MMAP; + else + vbuffer->memory = V4L2_MEMORY_USERPTR; + + vbuffer->length = v4l2camsrc->frame_byte_size; + ret->gbuffer = NULL; + + if (ioctl (pool->video_fd, VIDIOC_QUERYBUF, vbuffer) < 0) + goto querybuf_failed; + + if (v4l2camsrc->use_mmap) { + void *data; + data = (guint8 *) mmap (0, vbuffer->length, + PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd, vbuffer->m.offset); + if (data == MAP_FAILED) + goto mmap_failed; + GST_BUFFER_DATA (ret) = (guint8 *) data; + } else { + if (gst_pad_is_linked (GST_BASE_SRC_PAD (v4l2camsrc))) { + GST_LOG ("using pad_alloc, size=%d", v4l2camsrc->frame_byte_size); + GST_LOG ("ALLOC CAPS: %" GST_PTR_FORMAT, caps); + 
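+      /* In USERPTR mode the element prefers buffers allocated by the
+       * downstream peer (pad_alloc), so the driver captures straight into
+       * memory owned by the next element; if the pad is not linked yet it
+       * falls back to page-aligned memory from posix_memalign(), which is
+       * what USERPTR capture drivers typically expect. */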
+ flow_ret = + gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (v4l2camsrc), 0LL, + v4l2camsrc->frame_byte_size, caps, &ret->gbuffer); + if (flow_ret != GST_FLOW_OK) + goto pad_alloc_failed; + GST_BUFFER_DATA (ret) = ret->gbuffer->data; + } else { + void *data; + + GST_LOG ("using posix_memalign"); + if (posix_memalign (&data, getpagesize (), vbuffer->length) != 0) { + goto memalign_failed; + } + GST_BUFFER_DATA (ret) = (guint8 *) data; + } + } + + GST_BUFFER_SIZE (ret) = v4l2camsrc->frame_byte_size; + GST_BUFFER_FLAG_SET (ret, GST_BUFFER_FLAG_READONLY); + gst_buffer_set_caps (GST_BUFFER (ret), caps); + +#ifdef USE_MLOCK + GST_DEBUG ("mlocking buffer data"); + if (mlock ((void *) GST_BUFFER_DATA (ret), v4l2camsrc->frame_byte_size) == -1) + goto mlock_failed; +#endif + + /* mlocking succeeded, now we can set the pointer to vbuffer. The existence + * of this pointer will be used later to determine if the munlock() is + * needed */ + if (!v4l2camsrc->use_mmap) + vbuffer->m.userptr = (unsigned int) GST_BUFFER_DATA (ret); + + +#if 1 + GST_LOG (" index: %u", vbuffer->index); + GST_LOG (" type: %d", vbuffer->type); + GST_LOG (" bytesused: %u", vbuffer->bytesused); + GST_LOG (" flags: %08x", vbuffer->flags); + GST_LOG (" field: %d", vbuffer->field); + GST_LOG (" memory: %d", vbuffer->memory); + if (vbuffer->memory == V4L2_MEMORY_MMAP) + GST_LOG (" MMAP offset: %u", vbuffer->m.offset); + else if (vbuffer->memory == V4L2_MEMORY_USERPTR) + GST_LOG (" user address: %u", vbuffer->m.userptr); + GST_LOG (" length: %u", vbuffer->length); + GST_LOG (" input: %u", vbuffer->input); +#endif + + return ret; + + /* ERRORS */ +#ifdef USE_MLOCK +mlock_failed: + { + GST_WARNING ("Failed to mlock memory: %s", g_strerror (errno)); + gst_buffer_unref (GST_BUFFER (ret)); + g_free (vbuffer); + return NULL; + } +#endif +querybuf_failed: + { + gint errnosave = errno; + + GST_WARNING ("Failed QUERYBUF: %s", g_strerror (errnosave)); + gst_buffer_unref (GST_BUFFER (ret)); + g_free (vbuffer); + errno = errnosave; + return NULL; + } +memalign_failed: + { + GST_WARNING ("Failed to posix_memalign a buffer"); + g_free (vbuffer); + return NULL; + } +pad_alloc_failed: + { + GST_WARNING ("Failed to pad_alloc_buffer: %s", + gst_flow_get_name (flow_ret)); + g_free (vbuffer); + return NULL; + } +mmap_failed: + { + gint errnosave = errno; + GST_WARNING ("Failed to mmap: %s", g_strerror (errnosave)); + gst_buffer_unref (GST_BUFFER (ret)); + g_free (vbuffer); + errno = errnosave; + return NULL; + } +} + + +#define GST_TYPE_V4L2CAMSRC_BUFFER_POOL (gst_v4l2camsrc_buffer_pool_get_type()) +#define GST_IS_V4L2CAMSRC_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L2CAMSRC_BUFFER_POOL)) +#define GST_V4L2CAMSRC_BUFFER_POOL(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L2CAMSRC_BUFFER_POOL, GstMFLDV4l2CamSrcBufferPool)) + +static GstMiniObjectClass *buffer_pool_parent_class = NULL; + +static void +gst_v4l2camsrc_buffer_pool_finalize (GstMFLDV4l2CamSrcBufferPool * pool) +{ + g_mutex_free (pool->lock); + pool->lock = NULL; + + if (pool->video_fd >= 0) + close (pool->video_fd); + + if (pool->buffers) { + g_free (pool->buffers); + pool->buffers = NULL; + } + + if (pool->queued) { + g_free (pool->queued); + pool->queued = NULL; + } + + g_cond_free (pool->data_cond); + pool->data_cond = NULL; + + GST_MINI_OBJECT_CLASS (buffer_pool_parent_class)->finalize (GST_MINI_OBJECT + (pool)); +} + +/* + */ +static void +gst_v4l2camsrc_buffer_pool_init (GstMFLDV4l2CamSrcBufferPool * pool, + gpointer g_class) +{ + pool->lock = g_mutex_new 
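+  /* the pool lock guards the buffers / queued tables and num_live_buffers;
+   * data_cond is signalled from the buffer finalize hook whenever a buffer
+   * goes back to the driver, so a caller waiting for a free buffer in
+   * gst_v4l2_buffer_pool_update() can wake up */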
(); + pool->running = FALSE; + pool->num_live_buffers = 0; + pool->data_cond = g_cond_new (); + pool->is_vaapi_sharing = FALSE; +} + +/* + */ +static void +gst_v4l2camsrc_buffer_pool_class_init (gpointer g_class, gpointer class_data) +{ + GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class); + + buffer_pool_parent_class = g_type_class_peek_parent (g_class); + + mini_object_class->finalize = (GstMiniObjectFinalizeFunction) + gst_v4l2camsrc_buffer_pool_finalize; +} + +/* + */ +static GType +gst_v4l2camsrc_buffer_pool_get_type (void) +{ + static GType _gst_v4l2camsrc_buffer_pool_type; + + if (G_UNLIKELY (_gst_v4l2camsrc_buffer_pool_type == 0)) { + static const GTypeInfo v4l2camsrc_buffer_pool_info = { + sizeof (GstBufferClass), + NULL, + NULL, + gst_v4l2camsrc_buffer_pool_class_init, + NULL, + NULL, + sizeof (GstMFLDV4l2CamSrcBufferPool), + 0, + (GInstanceInitFunc) gst_v4l2camsrc_buffer_pool_init, + NULL + }; + _gst_v4l2camsrc_buffer_pool_type = + g_type_register_static (GST_TYPE_MINI_OBJECT, + "GstMFLDV4l2CamSrcBufferPool", &v4l2camsrc_buffer_pool_info, 0); + } + return _gst_v4l2camsrc_buffer_pool_type; +} + +/* + */ +static GstMFLDV4l2CamSrcBufferPool * +gst_v4l2camsrc_buffer_pool_new (GstMFLDV4l2CamSrc * v4l2camsrc, gint fd, + GstCaps * caps) +{ + GstMFLDV4l2CamSrcBufferPool *pool; + gint n; + + pool = (GstMFLDV4l2CamSrcBufferPool *) + gst_mini_object_new (GST_TYPE_V4L2CAMSRC_BUFFER_POOL); + + pool->video_fd = dup (fd); + if (pool->video_fd < 0) + goto dup_failed; + + GstStructure *structure = gst_caps_get_structure (caps, 0); + if (structure && gst_structure_has_name(structure, "video/x-vaapi-sharing")) + pool->is_vaapi_sharing = TRUE; + + pool->buffer_count = v4l2camsrc->num_buffers; + pool->buffers = g_new0 (GstV4l2Buffer *, pool->buffer_count); + pool->queued = g_new0 (guint, pool->buffer_count); + + for (n = 0; n < pool->buffer_count; n++) { + pool->buffers[n] = gst_v4l2camsrc_buffer_new (pool, v4l2camsrc, n, caps); + GST_LOG ("buffer ref is %d", GST_MINI_OBJECT_REFCOUNT (pool->buffers[n])); + if (!pool->buffers[n]) + goto buffer_new_failed; + } + + return pool; + + /* ERRORS */ +dup_failed: + { + gint errnosave = errno; + + gst_v4l2camsrc_buffer_pool_destroy (pool, v4l2camsrc); + + errno = errnosave; + + return NULL; + } +buffer_new_failed: + { + gint errnosave = errno; + + GST_LOG ("creating a new buffer failed"); + gst_mini_object_unref (GST_MINI_OBJECT (pool)); + + errno = errnosave; + + return NULL; + } +} + +/* + */ +static gboolean +gst_v4l2camsrc_buffer_pool_activate (GstMFLDV4l2CamSrcBufferPool * pool, + GstMFLDV4l2CamSrc * v4l2camsrc) +{ + gint n; + + g_mutex_lock (pool->lock); + + for (n = 0; n < pool->buffer_count; n++) { + + struct v4l2_buffer *buf; + + buf = (struct v4l2_buffer *) pool->buffers[n]->vbuffer; + + GST_LOG ("enqueue pool buffer %d", n); + + if (ioctl (pool->video_fd, VIDIOC_QBUF, buf) < 0) + goto queue_failed; + + pool->queued[n] = 1; + } + pool->running = TRUE; + + g_mutex_unlock (pool->lock); + + return TRUE; + + /* ERRORS */ +queue_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, + ("Could not enqueue buffers in device '%s'.", + v4l2camsrc->videodev), + ("enqueing buffer %d/%d failed: %s", + n, v4l2camsrc->num_buffers, g_strerror (errno))); + g_mutex_unlock (pool->lock); + return FALSE; + } +} + +/* requeue buffers that are writable again */ +/* FIXME: This isn't needed anymore. 
Buffers are re-queued automatically + * when they are finalized, so there is no need to wait for them separately */ +static gboolean +gst_v4l2_buffer_pool_update (GstMFLDV4l2CamSrcBufferPool * pool, + GstMFLDV4l2CamSrc * v4l2camsrc) +{ + gint n; + gint ref = 0; + + g_mutex_lock (pool->lock); + + for (n = 0; n < pool->buffer_count; n++) { + if (!pool->queued[n]) { + GST_LOG ("buffer %d is dequeued", n); + ref++; + } + } + + /* if all the buffers are dequeued, wait */ + if (ref == v4l2camsrc->num_buffers) { + GST_LOG ("no free buffers available"); + g_cond_wait (pool->data_cond, pool->lock); + } + + g_mutex_unlock (pool->lock); + + return (ref != v4l2camsrc->num_buffers) ? TRUE : FALSE; +} + +/* + */ +static void +gst_v4l2camsrc_buffer_pool_destroy (GstMFLDV4l2CamSrcBufferPool * pool, + GstMFLDV4l2CamSrc * v4l2camsrc) +{ + gint n; + + g_mutex_lock (pool->lock); + pool->running = FALSE; + pool->is_vaapi_sharing = FALSE; + g_mutex_unlock (pool->lock); + + GST_DEBUG ("destroy pool"); + + /* after this point, no more buffers will be queued or dequeued; no buffer + * from pool->buffers that is NULL will be set to a buffer, and no buffer that + * is not NULL will be pushed out. */ + + /* miniobjects have no dispose, so they can't break ref-cycles, as buffers ref + * the pool, we need to unref the buffer to properly finalize te pool */ + for (n = 0; n < pool->buffer_count; n++) { + GstBuffer *buf; + + g_mutex_lock (pool->lock); + buf = GST_BUFFER (pool->buffers[n]); + g_mutex_unlock (pool->lock); + + if (buf) { +#ifdef USE_MLOCK + if (pool->buffers[n].m.userptr) { + GST_DEBUG ("munlocking buffer data"); + munlock ((void *) pool->buffers[n].m.userptr, + v4l2camsrc->frame_byte_size); + } +#endif + /* we own the ref if the buffer is in pool->buffers; drop it. */ + gst_buffer_unref (buf); + } + } + + gst_mini_object_unref (GST_MINI_OBJECT (pool)); +} + +/****************************************************** + * gst_v4l2camsrc_get_capture_capabilities(): + * get the device's capturing capabilities + * return value: TRUE on success, FALSE on error + ******************************************************/ +static gboolean +gst_v4l2camsrc_get_capture_capabilities (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + GST_DEBUG_OBJECT (v4l2camsrc, "getting capabilities"); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) + return FALSE; + + if (ioctl (v4l2camsrc->video_fd, VIDIOC_QUERYCAP, &v4l2camsrc->vcap) < 0) + goto cap_failed; + + GST_LOG_OBJECT (v4l2camsrc, "driver: '%s'", v4l2camsrc->vcap.driver); + GST_LOG_OBJECT (v4l2camsrc, "card: '%s'", v4l2camsrc->vcap.card); + GST_LOG_OBJECT (v4l2camsrc, "bus_info: '%s'", v4l2camsrc->vcap.bus_info); + GST_LOG_OBJECT (v4l2camsrc, "version: %08x", v4l2camsrc->vcap.version); + GST_LOG_OBJECT (v4l2camsrc, "capabilites: %08x", + v4l2camsrc->vcap.capabilities); + + return TRUE; + + /* ERRORS */ +cap_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Error getting capabilities for device '%s': " + "It isn't a v4l2 driver. 
Check if it is a v4l1 driver.", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + return FALSE; + } +} + + +/****************************************************** + * gst_v4l2camsrc_set_input_sensor(): + * set which sensor is the input of ISP + * return value: TRUE on success, FALSE on error + ******************************************************/ +static gboolean +gst_v4l2camsrc_set_input_sensor (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + V4L2CameraInputSensor select_sensor; + + GST_DEBUG_OBJECT (v4l2camsrc, "setting input sensor"); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) + return FALSE; + + if (v4l2camsrc->input_sensor == GST_CAMERA_INPUT_SENSOR_PRIMARY) + select_sensor = V2L2_CAMERA_INPUT_SENSOR_PRIMARY; + else + select_sensor =V2L2_CAMERA_INPUT_SENSOR_SECONDARY; + + if (ioctl (v4l2camsrc->video_fd, VIDIOC_S_INPUT, + &select_sensor) < 0) + goto s_input_failed; + + GST_LOG_OBJECT (v4l2camsrc, "set input sensor to: %d", + select_sensor); + + return TRUE; + + /* ERRORS */ +s_input_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Error setting input for device '%s': ", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + return FALSE; + } +} + +static gboolean +get_supported_mmfw_control (GstMFLDV4l2CamSrc * v4l2camsrc, struct v4l2_queryctrl *control) +{ + int i; + GST_DEBUG_OBJECT (v4l2camsrc, "set private control (%x)", control->id); + + for (i = 0; i < N_MMFW_CONTROLS; i++) { + if (mmfw_wb_controls[i].id == control->id) { + *control = mmfw_wb_controls[i]; + return TRUE; + } + } + return FALSE; +} + + +/****************************************************** + * gst_v4l2camsrc_fill_lists(): + * fill the lists of enumerations + * return value: TRUE on success, FALSE on error + ******************************************************/ +static gboolean +gst_v4l2camsrc_fill_lists (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + gint n; + + GST_DEBUG_OBJECT (v4l2camsrc, "getting enumerations"); + GST_V4L2CAMSRC_CHECK_OPEN (v4l2camsrc); + + GST_DEBUG_OBJECT (v4l2camsrc, " controls+menus"); + /* and lastly, controls+menus (if appropriate) */ + for (n = V4L2_CID_BASE;; n++) { + struct v4l2_queryctrl control = { 0, }; + GstCameraSrcColorBalanceChannel *v4l2channel; + + GstColorBalanceChannel *channel; + + /* when we reached the last official CID, continue with private CIDs */ + if (n == V4L2_CID_LASTP1) { + GST_DEBUG_OBJECT (v4l2camsrc, "checking private CIDs"); + n = V4L2_CID_PRIVATE_BASE; + /* FIXME: We are still not handling private controls. We need a + new GstInterface to export those controls */ + //break; + GST_DEBUG_OBJECT (v4l2camsrc, "private ID"); + } + + control.id = n; + + if( n > SOURCE_PRIV_BASE) { + if( n > MM_CAM_SOURCE_PRIV_LAST) + break; + else + get_supported_mmfw_control(v4l2camsrc, &control); + } + else if (ioctl (v4l2camsrc->video_fd, VIDIOC_QUERYCTRL, &control) < 0) { + if (errno == EINVAL) { + if (n < V4L2_CID_PRIVATE_BASE) + /* continue so that we also check private controls */ + continue; + else + break; + } else { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Failed getting controls attributes on device '%s.'", + v4l2camsrc->videodev), + ("Failed querying control %d on device '%s'. 
(%d - %s)", + n, v4l2camsrc->videodev, errno, strerror (errno))); + return FALSE; + } + } + if (control.flags & V4L2_CTRL_FLAG_DISABLED) + continue; + + switch (n) { + case V4L2_CID_BRIGHTNESS: + case V4L2_CID_CONTRAST: + case V4L2_CID_SATURATION: + case V4L2_CID_HUE: + case V4L2_CID_BLACK_LEVEL: + case V4L2_CID_AUTO_WHITE_BALANCE: + case V4L2_CID_DO_WHITE_BALANCE: + case V4L2_CID_RED_BALANCE: + case V4L2_CID_BLUE_BALANCE: + case V4L2_CID_GAMMA: + case V4L2_CID_EXPOSURE: + case V4L2_CID_AUTOGAIN: + case V4L2_CID_GAIN: + case V4L2_CID_COLORFX: + case MM_CAM_FILTER_WB_SOURCE_PRIV: + case MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV: + /* we only handle these for now (why?) */ + break; + case V4L2_CID_HFLIP: + case V4L2_CID_VFLIP: + case V4L2_CID_HCENTER: + case V4L2_CID_VCENTER: +#ifdef V4L2_CID_PAN_RESET + case V4L2_CID_PAN_RESET: +#endif +#ifdef V4L2_CID_TILT_RESET + case V4L2_CID_TILT_RESET: +#endif + /* not handled here, handled by VideoOrientation interface */ + control.id++; + break; + case V4L2_CID_AUDIO_VOLUME: + case V4L2_CID_AUDIO_BALANCE: + case V4L2_CID_AUDIO_BASS: + case V4L2_CID_AUDIO_TREBLE: + case V4L2_CID_AUDIO_MUTE: + case V4L2_CID_AUDIO_LOUDNESS: + /* FIXME: We should implement GstMixer interface */ + /* fall through */ + default: + GST_DEBUG_OBJECT (v4l2camsrc, + "ControlID %s (%x) unhandled, FIXME", control.name, n); + control.id++; + break; + } + if (n != control.id) + continue; + + GST_DEBUG_OBJECT (v4l2camsrc, "Adding ControlID %s (%x)", control.name, n); + v4l2channel = + g_object_new (GST_TYPE_CAMERA_SRC_COLOR_BALANCE_CHANNEL, NULL); + channel = GST_COLOR_BALANCE_CHANNEL (v4l2channel); + channel->label = g_strdup ((const gchar *) control.name); + + v4l2channel->id = n; + + switch (control.type) { + case V4L2_CTRL_TYPE_INTEGER: + channel->min_value = control.minimum; + channel->max_value = control.maximum; + break; + case V4L2_CTRL_TYPE_BOOLEAN: + channel->min_value = FALSE; + channel->max_value = TRUE; + break; + default: + /* FIXME we should find out how to handle V4L2_CTRL_TYPE_BUTTON. 
+ BUTTON controls like V4L2_CID_DO_WHITE_BALANCE can just be set (1) or + unset (0), but can't be queried */ + GST_DEBUG_OBJECT (v4l2camsrc, + "Control with non supported type %s (%x), type=%d", + control.name, n, control.type); + channel->min_value = channel->max_value = 0; + break; + } + + gst_camerasrc_add_color_channel (GST_CAMERA_SRC (v4l2camsrc), channel); + } + + GST_DEBUG_OBJECT (v4l2camsrc, "done"); + return TRUE; +} + +/****************************************************** + * gst_v4l2camsrc_open(): + * open the video device (v4l2camsrc->videodev) + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_open (GstCameraSrc * camsrc) +{ + struct stat st; + GstPollFD pollfd = GST_POLL_FD_INIT; + + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + GST_DEBUG_OBJECT (v4l2camsrc, "Trying to open device %s", + v4l2camsrc->videodev); + + GST_V4L2CAMSRC_CHECK_NOT_OPEN (v4l2camsrc); + GST_V4L2CAMSRC_CHECK_NOT_ACTIVE (v4l2camsrc); + + /* be sure we have a device */ + if (!v4l2camsrc->videodev) + v4l2camsrc->videodev = g_strdup ("/dev/video"); + + /* check if it is a device */ + if (stat (v4l2camsrc->videodev, &st) == -1) + goto stat_failed; + + if (!S_ISCHR (st.st_mode)) + goto no_device; + + /* open the device */ + v4l2camsrc->video_fd = + open (v4l2camsrc->videodev, O_RDWR /* | O_NONBLOCK */ ); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) + goto not_open; + + /* get capabilities, error will be posted */ + if (!gst_v4l2camsrc_get_capture_capabilities (v4l2camsrc)) + goto error; + + /* get capabilities, error will be posted */ + if (!gst_v4l2camsrc_set_input_sensor (v4l2camsrc)) + goto error; + + /* do we need to be a capture device? */ + if (GST_IS_V4L2CAMSRC (v4l2camsrc) && + !(v4l2camsrc->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) + goto not_capture; + + /* Before iterating enumerations, clear the parent's color channel list */ + gst_camerasrc_clear_color_channels (camsrc); + + /* create enumerations, posts errors. 
*/ + if (!gst_v4l2camsrc_fill_lists (v4l2camsrc)) + goto error; + + GST_INFO_OBJECT (v4l2camsrc, + "Opened device '%s' (%s) successfully", + v4l2camsrc->vcap.card, v4l2camsrc->videodev); + + pollfd.fd = v4l2camsrc->video_fd; + gst_poll_add_fd (v4l2camsrc->poll, &pollfd); + gst_poll_fd_ctl_read (v4l2camsrc->poll, &pollfd, TRUE); + + gst_v4l2camsrc_libmfldcam_init (v4l2camsrc); + + return TRUE; + + /* ERRORS */ +stat_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, NOT_FOUND, + ("Cannot identify device '%s'.", v4l2camsrc->videodev), + GST_ERROR_SYSTEM); + goto error; + } +no_device: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, NOT_FOUND, + ("This isn't a device '%s'.", v4l2camsrc->videodev), GST_ERROR_SYSTEM); + goto error; + } +not_open: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, OPEN_READ_WRITE, + ("Could not open device '%s' for reading and writing.", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + goto error; + } +not_capture: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, NOT_FOUND, + ("Device '%s' is not a capture device.", + v4l2camsrc->videodev), + ("Capabilities: 0x%x", v4l2camsrc->vcap.capabilities)); + goto error; + } +error: + { + if (GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) { + /* close device */ + close (v4l2camsrc->video_fd); + v4l2camsrc->video_fd = -1; + } + + return FALSE; + } +} + +/****************************************************** + * gst_v4l2camsrc_close(): + * close the video device (v4l2camsrc->video_fd) + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_close (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + GstPollFD pollfd = GST_POLL_FD_INIT; + + GST_DEBUG_OBJECT (v4l2camsrc, "Trying to close %s", v4l2camsrc->videodev); + + GST_V4L2CAMSRC_CHECK_OPEN (v4l2camsrc); + GST_V4L2CAMSRC_CHECK_NOT_ACTIVE (v4l2camsrc); + + /* close device */ + gst_v4l2camsrc_libmfldcam_deinit (v4l2camsrc); + + close (v4l2camsrc->video_fd); + pollfd.fd = v4l2camsrc->video_fd; + gst_poll_remove_fd (v4l2camsrc->poll, &pollfd); + v4l2camsrc->video_fd = -1; + + return TRUE; +} + +/****************************************************** + * gst_v4l2camsrc_get_attribute(): + * try to get the value of one specific attribute + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_get_attribute (GstCameraSrc * camsrc, + int attribute_num, int *value) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + struct v4l2_control control; + cam_err_t err; + int tmp_value; + + GST_DEBUG_OBJECT (v4l2camsrc, "getting value of attribute %d", attribute_num); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) + return FALSE; + + control.id = attribute_num; + + if (control.id > SOURCE_PRIV_BASE) { + switch (control.id) { + case MM_CAM_FILTER_WB_SOURCE_PRIV: + err = cam_feature_get (v4l2camsrc->video_fd, CAM_AWB_MODE, &tmp_value); + *value = find_item (gst_v4l2camsrc_wb_map, tmp_value); + break; + case MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV: + err = cam_feature_get (v4l2camsrc->video_fd, CAM_GENERAL_EFFECT_TYPE, &tmp_value); + *value = find_item (gst_v4l2camsrc_effect_map, tmp_value); + break; + default: + break; + } + } + else { + if (ioctl (v4l2camsrc->video_fd, VIDIOC_G_CTRL, &control) < 0) + goto ctrl_failed1; + *value = control.value; + } + + return TRUE; + +ctrl_failed1: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_USER; + 
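/* VIDIOC_G_CTRL rejected this id, so retry once through the extended
+     * control interface with a one-entry control array in the user class. */
+ 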
controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + + if (ioctl (v4l2camsrc->video_fd, VIDIOC_G_EXT_CTRLS, &controls) < 0) + goto ctrl_failed2; + + *value = control.value; + + return TRUE; + + } + + /* ERRORS */ +ctrl_failed2: + { + GST_ELEMENT_WARNING (v4l2camsrc, RESOURCE, SETTINGS, + ("Failed to get value for control %d on device '%s'.", + attribute_num, v4l2camsrc->videodev), GST_ERROR_SYSTEM); + return FALSE; + } +} + +/****************************************************** + * gst_v4l2camsrc_set_attribute(): + * try to set the value of one specific attribute + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_set_attribute (GstCameraSrc * camsrc, + int attribute_num, const int value) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + struct v4l2_control control; + + GST_DEBUG_OBJECT (v4l2camsrc, "setting value of attribute %d to %d", + attribute_num, value); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) + return FALSE; + + control.id = attribute_num; + control.value = value; + if (control.id > SOURCE_PRIV_BASE) { + switch (control.id) { + case MM_CAM_FILTER_WB_SOURCE_PRIV: + cam_feature_set (v4l2camsrc->video_fd, CAM_AWB_MODE, gst_v4l2camsrc_wb_map[value]); + break; + case MM_CAM_FILTER_COLOR_TONE_SOURCE_PRIV: + cam_feature_set (v4l2camsrc->video_fd, CAM_GENERAL_EFFECT_TYPE, gst_v4l2camsrc_effect_map[value]); + break; + + default: + break; + } + } + else { + if (ioctl (v4l2camsrc->video_fd, VIDIOC_S_CTRL, &control) < 0) + goto ctrl_failed1; + } + return TRUE; + +ctrl_failed1: + { + struct v4l2_ext_controls controls; + struct v4l2_ext_control control; + + controls.ctrl_class = V4L2_CTRL_CLASS_USER; + controls.count = 1; + controls.controls = &control; + + control.id = attribute_num; + control.value = value; + + if (ioctl (v4l2camsrc->video_fd, VIDIOC_S_EXT_CTRLS, &controls) < 0) + goto ctrl_failed2; + + return TRUE; + } + + /* ERRORS */ +ctrl_failed2: + { + GST_ELEMENT_WARNING (v4l2camsrc, RESOURCE, SETTINGS, + ("Failed to set value %d for control %d on device '%s'.", + value, attribute_num, v4l2camsrc->videodev), GST_ERROR_SYSTEM); + return FALSE; + } +} + + +/* complete made up ranking, the values themselves are meaningless */ +#define YUV_BASE_RANK 1000 +#define JPEG_BASE_RANK 500 +#define DV_BASE_RANK 200 +#define RGB_BASE_RANK 100 +#define YUV_ODD_BASE_RANK 50 +#define RGB_ODD_BASE_RANK 25 +#define BAYER_BASE_RANK 15 +#define S910_BASE_RANK 10 +#define GREY_BASE_RANK 5 +#define PWC_BASE_RANK 1 + +/* This flag is already used by libv4l2 although + * it was added to the Linux kernel in 2.6.32 + */ +#ifndef V4L2_FMT_FLAG_EMULATED +#define V4L2_FMT_FLAG_EMULATED 0x0002 +#endif + +static gint +gst_v4l2camsrc_format_get_rank (const struct v4l2_fmtdesc *fmt) +{ + guint32 fourcc = fmt->pixelformat; + gboolean emulated = ((fmt->flags & V4L2_FMT_FLAG_EMULATED) != 0); + gint rank = 0; + + switch (fourcc) { + case V4L2_PIX_FMT_MJPEG: + rank = JPEG_BASE_RANK; + break; + case V4L2_PIX_FMT_JPEG: + rank = JPEG_BASE_RANK + 1; + break; + case V4L2_PIX_FMT_MPEG: /* MPEG */ + rank = JPEG_BASE_RANK + 2; + break; + + case V4L2_PIX_FMT_RGB332: + case V4L2_PIX_FMT_RGB555: + case V4L2_PIX_FMT_RGB555X: + case V4L2_PIX_FMT_RGB565: + case V4L2_PIX_FMT_RGB565X: + rank = RGB_ODD_BASE_RANK; + break; + + case V4L2_PIX_FMT_RGB24: + case V4L2_PIX_FMT_BGR24: + rank = RGB_BASE_RANK - 1; + break; + + case V4L2_PIX_FMT_RGB32: + case V4L2_PIX_FMT_BGR32: + rank = RGB_BASE_RANK; + break; + + 
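/* Sensor-native YUV layouts below get the highest base ranks so they are
+        * preferred at caps fixation; greyscale, Bayer and vendor-specific
+        * formats stay near the bottom of the ordering. */
+ 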
case V4L2_PIX_FMT_GREY: /* 8 Greyscale */ + rank = GREY_BASE_RANK; + break; + + case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */ + case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */ + case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */ + case V4L2_PIX_FMT_HI240: /* 8 8-bit color */ + case V4L2_PIX_FMT_NV16: /* 16 bit YUV 422, Y, UV plane */ + rank = YUV_ODD_BASE_RANK; + break; + case V4L2_PIX_FMT_YUV444: /* YUV 444, 24 bits per pixel */ + rank = YUV_BASE_RANK + 0; + break; + case V4L2_PIX_FMT_YVU410: /* YVU9, 9 bits per pixel */ + rank = YUV_BASE_RANK + 3; + break; + case V4L2_PIX_FMT_YUV410: /* YUV9, 9 bits per pixel */ + rank = YUV_BASE_RANK + 2; + break; + case V4L2_PIX_FMT_YUV420: /* I420, 12 bits per pixel */ + rank = YUV_BASE_RANK + 7; + break; + case V4L2_PIX_FMT_YUYV: /* YUY2, 16 bits per pixel */ + rank = YUV_BASE_RANK + 10; + break; + case V4L2_PIX_FMT_YVU420: /* YV12, 12 bits per pixel */ + rank = YUV_BASE_RANK + 6; + break; + case V4L2_PIX_FMT_UYVY: /* UYVY, 16 bits per pixel */ + rank = YUV_BASE_RANK + 9; + break; + case V4L2_PIX_FMT_Y41P: /* Y41P, 12 bits per pixel */ + rank = YUV_BASE_RANK + 5; + break; + case V4L2_PIX_FMT_YUV411P: /* Y41B, 12 bits per pixel */ + rank = YUV_BASE_RANK + 4; + break; + case V4L2_PIX_FMT_YUV422P: /* Y42B, 16 bits per pixel */ + rank = YUV_BASE_RANK + 8; + break; + + case V4L2_PIX_FMT_DV: + rank = DV_BASE_RANK; + break; + + case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */ + rank = 0; + break; + + case V4L2_PIX_FMT_SBGGR8: + case V4L2_PIX_FMT_SRGGB8: + case V4L2_PIX_FMT_SGBRG8: + case V4L2_PIX_FMT_SGRBG10: + case V4L2_PIX_FMT_SRGGB10: + case V4L2_PIX_FMT_SGBRG10: + rank = BAYER_BASE_RANK; + break; + +#ifdef V4L2_PIX_FMT_SN9C10X + case V4L2_PIX_FMT_SN9C10X: + rank = S910_BASE_RANK; + break; +#endif + +#ifdef V4L2_PIX_FMT_PWC1 + case V4L2_PIX_FMT_PWC1: + rank = PWC_BASE_RANK; + break; +#endif +#ifdef V4L2_PIX_FMT_PWC2 + case V4L2_PIX_FMT_PWC2: + rank = PWC_BASE_RANK; + break; +#endif + + default: + GST_LOG("Don't know how to rank pixelformat %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS(fourcc)); + rank = 0; + break; + } + + /* All ranks are below 1<<15 so a shift by 15 + * will a) make all non-emulated formats larger + * than emulated and b) will not overflow + */ + if (!emulated) + rank <<= 15; + + return rank; +} + +static gint +gst_v4l2camsrc_format_cmp_func (gconstpointer a, gconstpointer b) +{ + const struct v4l2_fmtdesc *fa = a; + const struct v4l2_fmtdesc *fb = b; + + if (fa->pixelformat == fb->pixelformat) + return 0; + + return gst_v4l2camsrc_format_get_rank (fb) - + gst_v4l2camsrc_format_get_rank (fa); +} + +/****************************************************** + * gst_v4l2camsrc_fill_format_list(): + * create list of supported capture formats + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_fill_format_list (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + gint n; + struct v4l2_fmtdesc *format; + + GST_DEBUG_OBJECT (v4l2camsrc, "getting src format enumerations"); + + /* format enumeration */ + for (n = 0;; n++) { + format = g_new0 (struct v4l2_fmtdesc, 1); + + format->index = n; + format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (ioctl (v4l2camsrc->video_fd, VIDIOC_ENUM_FMT, format) < 0) { + if (errno == EINVAL) { + g_free (format); + break; /* end of enumeration */ + } else { + goto failed; + } + } + + GST_LOG_OBJECT (v4l2camsrc, "index: %u", format->index); + GST_LOG_OBJECT (v4l2camsrc, "type: %d", format->type); + GST_LOG_OBJECT (v4l2camsrc, "flags: %08x", format->flags); + 
GST_LOG_OBJECT (v4l2camsrc, "description: '%s'", format->description); + GST_LOG_OBJECT (v4l2camsrc, "pixelformat: %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS (format->pixelformat)); + + /* sort formats according to our preference; we do this, because caps + * are probed in the order the formats are in the list, and the order of + * formats in the final probed caps matters for things like fixation */ + v4l2camsrc->formats = g_slist_insert_sorted (v4l2camsrc->formats, format, + (GCompareFunc) gst_v4l2camsrc_format_cmp_func); + } + + GST_DEBUG_OBJECT (v4l2camsrc, "got %d format(s)", n); + + return TRUE; + + /* ERRORS */ +failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Failed to enumerate possible video formats device '%s' can work with", + v4l2camsrc->videodev), + ("Failed to get number %d in pixelformat enumeration for %s. (%d - %s)", + n, v4l2camsrc->videodev, errno, g_strerror (errno))); + g_free (format); + return FALSE; + } +} + +/****************************************************** + * gst_v4l2camsrc_clear_format_list(): + * free list of supported capture formats + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_clear_format_list (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + g_slist_foreach (v4l2camsrc->formats, (GFunc) g_free, NULL); + g_slist_free (v4l2camsrc->formats); + v4l2camsrc->formats = NULL; + + return TRUE; +} + +/* The frame interval enumeration code first appeared in Linux 2.6.19. */ +#ifdef VIDIOC_ENUM_FRAMEINTERVALS +static GstStructure * +gst_v4l2camsrc_probe_caps_for_format_and_size (GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 pixelformat, + guint32 width, guint32 height, const GstStructure * template) +{ + gint fd = v4l2camsrc->video_fd; + struct v4l2_frmivalenum ival; + guint32 num, denom; + GstStructure *s; + GValue rates = { 0, }; + + memset (&ival, 0, sizeof (struct v4l2_frmivalenum)); + ival.index = 0; + ival.pixel_format = pixelformat; + ival.width = width; + ival.height = height; + + GST_LOG_OBJECT (v4l2camsrc, "get frame interval for %ux%u, %" + GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat)); + + /* keep in mind that v4l2 gives us frame intervals (durations); we invert the + * fraction to get framerate */ + if (ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) < 0) + goto enum_frameintervals_failed; + + if (ival.type == V4L2_FRMIVAL_TYPE_DISCRETE) { + GValue rate = { 0, }; + + g_value_init (&rates, GST_TYPE_LIST); + g_value_init (&rate, GST_TYPE_FRACTION); + + do { + num = ival.discrete.numerator; + denom = ival.discrete.denominator; + + if (num > G_MAXINT || denom > G_MAXINT) { + /* let us hope we don't get here... 
*/ + num >>= 1; + denom >>= 1; + } + + GST_LOG_OBJECT (v4l2camsrc, "adding discrete framerate: %d/%d", + denom, num); + + /* swap to get the framerate */ + gst_value_set_fraction (&rate, denom, num); + gst_value_list_append_value (&rates, &rate); + + ival.index++; + } while (ioctl (fd, VIDIOC_ENUM_FRAMEINTERVALS, &ival) >= 0); + + /* FIXME MASSIVE UGLY HACK: Pretend to support 30/1 fps always */ + gst_value_set_fraction(&rate, 30, 1); + gst_value_list_append_value(&rates, &rate); + + } else if (ival.type == V4L2_FRMIVAL_TYPE_STEPWISE) { + GValue min = { 0, }; + GValue step = { 0, }; + GValue max = { 0, }; + gboolean added = FALSE; + guint32 minnum, mindenom; + guint32 maxnum, maxdenom; + + g_value_init (&rates, GST_TYPE_LIST); + + g_value_init (&min, GST_TYPE_FRACTION); + g_value_init (&step, GST_TYPE_FRACTION); + g_value_init (&max, GST_TYPE_FRACTION); + + /* get the min */ + minnum = ival.stepwise.min.numerator; + mindenom = ival.stepwise.min.denominator; + if (minnum > G_MAXINT || mindenom > G_MAXINT) { + minnum >>= 1; + mindenom >>= 1; + } + GST_LOG_OBJECT (v4l2camsrc, "stepwise min frame interval: %d/%d", minnum, + mindenom); + gst_value_set_fraction (&min, minnum, mindenom); + + /* get the max */ + maxnum = ival.stepwise.max.numerator; + maxdenom = ival.stepwise.max.denominator; + if (maxnum > G_MAXINT || maxdenom > G_MAXINT) { + maxnum >>= 1; + maxdenom >>= 1; + } + + GST_LOG_OBJECT (v4l2camsrc, "stepwise max frame interval: %d/%d", maxnum, + maxdenom); + gst_value_set_fraction (&max, maxnum, maxdenom); + + /* get the step */ + num = ival.stepwise.step.numerator; + denom = ival.stepwise.step.denominator; + if (num > G_MAXINT || denom > G_MAXINT) { + num >>= 1; + denom >>= 1; + } + + if (num == 0 || denom == 0) { + /* in this case we have a wrong fraction or no step, set the step to max + * so that we only add the min value in the loop below */ + num = maxnum; + denom = maxdenom; + } + + /* since we only have gst_value_fraction_subtract and not add, negate the + * numerator */ + GST_LOG_OBJECT (v4l2camsrc, "stepwise step frame interval: %d/%d", + num, denom); + + gst_value_set_fraction (&step, -num, denom); + + while (gst_value_compare (&min, &max) <= 0) { + GValue rate = { 0, }; + + num = gst_value_get_fraction_numerator (&min); + denom = gst_value_get_fraction_denominator (&min); + GST_LOG_OBJECT (v4l2camsrc, "adding stepwise framerate: %d/%d", + denom, num); + + /* invert to get the framerate */ + g_value_init (&rate, GST_TYPE_FRACTION); + gst_value_set_fraction (&rate, denom, num); + gst_value_list_append_value (&rates, &rate); + added = TRUE; + + /* we're actually adding because step was negated above. This is because + * there is no _add function... 
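(so each pass of the loop below effectively adds one step to min until it passes max)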
*/ + if (!gst_value_fraction_subtract (&min, &min, &step)) { + GST_WARNING_OBJECT (v4l2camsrc, "could not step fraction!"); + break; + } + } + if (!added) { + /* no range was added, leave the default range from the template */ + GST_WARNING_OBJECT (v4l2camsrc, "no range added, leaving default"); + g_value_unset (&rates); + } + } else if (ival.type == V4L2_FRMIVAL_TYPE_CONTINUOUS) { + guint32 maxnum, maxdenom; + + g_value_init (&rates, GST_TYPE_FRACTION_RANGE); + + num = ival.stepwise.min.numerator; + denom = ival.stepwise.min.denominator; + if (num > G_MAXINT || denom > G_MAXINT) { + num >>= 1; + denom >>= 1; + } + + maxnum = ival.stepwise.max.numerator; + maxdenom = ival.stepwise.max.denominator; + if (maxnum > G_MAXINT || maxdenom > G_MAXINT) { + maxnum >>= 1; + maxdenom >>= 1; + } + + GST_LOG_OBJECT (v4l2camsrc, "continuous frame interval %d/%d to %d/%d", + maxdenom, maxnum, denom, num); + + gst_value_set_fraction_range_full (&rates, maxdenom, maxnum, denom, num); + } else { + goto unknown_type; + } + +return_data: + s = gst_structure_copy (template); + /* https://projects.maemo.org/bugzilla/show_bug.cgi?id=105590 */ + gst_structure_set (s, + "width", G_TYPE_INT, (gint) width, + "height", G_TYPE_INT, (gint) height, NULL); + + if (G_IS_VALUE (&rates)) { + /* only change the framerate on the template when we have a valid probed new + * value */ + gst_structure_set_value (s, "framerate", &rates); + g_value_unset (&rates); + } + return s; + + /* ERRORS */ +enum_frameintervals_failed: + { + GST_DEBUG_OBJECT (v4l2camsrc, + "Unable to enumerate intervals for %" GST_FOURCC_FORMAT "@%ux%u", + GST_FOURCC_ARGS (pixelformat), width, height); + goto return_data; + } +unknown_type: + { + /* I don't see how this is actually an error, we ignore the format then */ + GST_WARNING_OBJECT (v4l2camsrc, + "Unknown frame interval type at %" GST_FOURCC_FORMAT "@%ux%u: %u", + GST_FOURCC_ARGS (pixelformat), width, height, ival.type); + return NULL; + } +} +#endif /* defined VIDIOC_ENUM_FRAMEINTERVALS */ + + +/* + */ +static gint +compare_resolutions (gconstpointer a, gconstpointer b) +{ + GstStructure *as = (GstStructure *) a; + GstStructure *bs = (GstStructure *) b; + gint aw, bw, ah, bh; + + gst_structure_get_int (as, "width", &aw); + gst_structure_get_int (bs, "width", &bw); + gst_structure_get_int (as, "height", &ah); + gst_structure_get_int (bs, "height", &bh); + + /* FIXME: What is the best way to compare resolutions if their aspect + * ratio isn't the same? 
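For now the sum of the width and height differences is used, which still gives a stable smallest-to-largest ordering.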
*/ + return ((aw - bw) + (ah - bh)); +} + + +/* + */ +static gboolean +fractions_are_equal (gint num1, gint den1, gint num2, gint den2) +{ + GValue fraction1 = { 0, }, fraction2 = { + 0,}; + + g_value_init (&fraction1, GST_TYPE_FRACTION); + g_value_init (&fraction2, GST_TYPE_FRACTION); + gst_value_set_fraction (&fraction1, num1, den1); + gst_value_set_fraction (&fraction2, num2, den2); + /* we know we don't have to unset the values in this case */ + return (gst_value_compare (&fraction1, &fraction2) == GST_VALUE_EQUAL); +} + +static guint +gst_v4l2camsrc_get_frame_size (guint32 fourcc, guint * w, guint * h) +{ + guint outsize = 0; + + switch (fourcc) { + case V4L2_PIX_FMT_YUV420: + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2)); + break; + case V4L2_PIX_FMT_YUYV: + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case V4L2_PIX_FMT_Y41P: + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case V4L2_PIX_FMT_UYVY: + outsize = (GST_ROUND_UP_2 (*w) * 2) * *h; + break; + case V4L2_PIX_FMT_YVU420: + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * (GST_ROUND_UP_2 (*h) / 2)); + break; + case V4L2_PIX_FMT_YUV411P: + outsize = GST_ROUND_UP_4 (*w) * *h; + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 4) * *h); + break; + case V4L2_PIX_FMT_YUV422P: + outsize = GST_ROUND_UP_4 (*w) * *h; + outsize += 2 * ((GST_ROUND_UP_8 (*w) / 2) * *h); + break; + case V4L2_PIX_FMT_NV12: + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += (GST_ROUND_UP_4 (*w) * *h) / 2; + break; + case V4L2_PIX_FMT_NV21: + outsize = GST_ROUND_UP_4 (*w) * GST_ROUND_UP_2 (*h); + outsize += (GST_ROUND_UP_4 (*w) * *h) / 2; + break; + } + + return outsize; +} + +/* + */ +static gboolean +gst_v4l2camsrc_configure_device (GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 * pixelformat, guint * width, guint * height, + guint * fps_n, guint * fps_d) +{ + gint fd = v4l2camsrc->video_fd; + struct v4l2_format format; + struct v4l2_streamparm stream; + gboolean ret = TRUE; + + GST_DEBUG_OBJECT (v4l2camsrc, "Configuring device to %dx%d, format " + "%" GST_FOURCC_FORMAT, *width, *height, GST_FOURCC_ARGS (*pixelformat)); + + GST_V4L2CAMSRC_CHECK_OPEN (v4l2camsrc); + + memset (&format, 0x00, sizeof (struct v4l2_format)); + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (ioctl (fd, VIDIOC_G_FMT, &format) < 0) + goto get_fmt_failed; + + if (v4l2camsrc->dump_raw) + format.type = V4L2_BUF_TYPE_PRIVATE; + else + format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + format.fmt.pix.width = *width; + format.fmt.pix.height = *height; + format.fmt.pix.pixelformat = *pixelformat; + /* request whole frames; change when gstreamer supports interlaced video + * (INTERLACED mode returns frames where the fields have already been + * combined, there are other modes for requesting fields individually) */ + format.fmt.pix.field = V4L2_FIELD_INTERLACED; + + if (ioctl (fd, VIDIOC_S_FMT, &format) < 0) { + if (errno != EINVAL) + goto set_fmt_failed; + + /* try again with progressive video */ + format.fmt.pix.width = *width; + format.fmt.pix.height = *height; + format.fmt.pix.pixelformat = *pixelformat; + format.fmt.pix.field = V4L2_FIELD_NONE; + if (ioctl (fd, VIDIOC_S_FMT, &format) < 0) + goto set_fmt_failed; + } + + if (v4l2camsrc->dump_raw) + v4l2camsrc->raw_output_size = format.fmt.pix.priv; + + if (format.fmt.pix.width != *width || format.fmt.pix.height != *height) + goto invalid_dimensions; + + if (format.fmt.pix.pixelformat != *pixelformat) 
+ goto invalid_pixelformat; + + memset (&stream, 0x00, sizeof (struct v4l2_streamparm)); + stream.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl (fd, VIDIOC_G_PARM, &stream) < 0) { + GST_ELEMENT_WARNING (v4l2camsrc, RESOURCE, SETTINGS, + ("Could not get parameters on device '%s'", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + goto done; + } + + if (fps_n == NULL || fps_d == NULL) { + GST_LOG_OBJECT (v4l2camsrc, "Framerate will not be set"); + goto done; + } + + GST_LOG_OBJECT (v4l2camsrc, "Desired framerate: %u/%u", *fps_n, *fps_d); + + /* Note: V4L2 provides the frame interval, we have the frame rate */ + if (stream.parm.capture.timeperframe.denominator && + fractions_are_equal (stream.parm.capture.timeperframe.numerator, + stream.parm.capture.timeperframe.denominator, *fps_d, *fps_n)) { + GST_LOG_OBJECT (v4l2camsrc, "Desired framerate already set, nothing to do"); + goto done; + } + + /* We want to change the frame rate, so check whether we can. Some cheap USB + * cameras don't have the capability */ + if ((stream.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) { + GST_DEBUG_OBJECT (v4l2camsrc, "Not setting framerate (not supported)"); + goto done; + } + + GST_LOG_OBJECT (v4l2camsrc, "Setting framerate to %u/%u", *fps_n, *fps_d); + + /* Note: V4L2 wants the frame interval, we have the frame rate */ + stream.parm.capture.timeperframe.numerator = *fps_d; + stream.parm.capture.timeperframe.denominator = *fps_n; + + /* some cheap USB cam's won't accept any change */ + if (ioctl (fd, VIDIOC_S_PARM, &stream) < 0) { + GST_ELEMENT_WARNING (v4l2camsrc, RESOURCE, SETTINGS, + ("Video input device did not accept new frame rate setting."), + GST_ERROR_SYSTEM); + goto done; + } + + GST_INFO_OBJECT (v4l2camsrc, "Set frame interval to %u/%u", + stream.parm.capture.timeperframe.numerator, + stream.parm.capture.timeperframe.denominator); + + cam_set_frame_rate( ((gfloat)stream.parm.capture.timeperframe.denominator / stream.parm.capture.timeperframe.numerator)); + +done: + v4l2camsrc->max_zoom_factor = 10.0; + v4l2camsrc->frame_byte_size = gst_v4l2camsrc_get_frame_size (*pixelformat, + width, height); + /* v4l2camsrc->frame_byte_size = GST_ROUND_UP_2 (*width) * 2 * (*height); */ + + if (v4l2camsrc->bayer_downscaling) { + struct v4l2_crop crop; + memset (&v4l2camsrc->vcrop, 0, sizeof (struct v4l2_cropcap)); + v4l2camsrc->vcrop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (ioctl (v4l2camsrc->video_fd, VIDIOC_CROPCAP, &v4l2camsrc->vcrop) < 0) { + GST_DEBUG_OBJECT (v4l2camsrc, "Failed to query crop cap"); + goto no_crop; + } + + crop.c = v4l2camsrc->vcrop.defrect; /* reset to default */ + crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (-1 == ioctl (fd, VIDIOC_S_CROP, &crop)) { + switch (errno) { + case EINVAL: + GST_DEBUG_OBJECT (v4l2camsrc, "Crop not support\n"); + break; + default: + GST_DEBUG_OBJECT (v4l2camsrc, "Crop failed\n"); + break; + } + } + + goto no_crop; + + + + v4l2camsrc->crop_supported = TRUE; + + GST_DEBUG_OBJECT (v4l2camsrc, "got cropping bounds: x:%d, y:%d, w:%d, h:%d", + v4l2camsrc->vcrop.bounds.left, + v4l2camsrc->vcrop.bounds.top, + v4l2camsrc->vcrop.bounds.width, v4l2camsrc->vcrop.bounds.height); + + GST_DEBUG_OBJECT (v4l2camsrc, "cropping defrect: x:%d, y:%d, w:%d, h:%d", + v4l2camsrc->vcrop.defrect.left, + v4l2camsrc->vcrop.defrect.top, + v4l2camsrc->vcrop.defrect.width, v4l2camsrc->vcrop.defrect.height); + + gst_v4l2camsrc_update_cropping (v4l2camsrc, *width, *height, 1.0); + } + +no_crop: + cam_set_capture_fmt (v4l2camsrc->video_fd, *width, *height, *pixelformat); + + return ret; 
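+
+  /* Worked example of the frame-rate handling above: a negotiated caps
+   * framerate of 30/1 fps is stored as timeperframe = 1/30 (numerator =
+   * fps_d, denominator = fps_n), and libmfldcam is then handed
+   * denominator / numerator = 30.0 frames per second. */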
+ + /* ERRORS */ +get_fmt_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Device '%s' does not support video capture", + v4l2camsrc->videodev), + ("Call to G_FMT failed: (%s)", g_strerror (errno))); + return FALSE; + } +set_fmt_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Device '%s' cannot capture at %dx%d", + v4l2camsrc->videodev, *width, *height), + ("Call to S_FMT failed for %" GST_FOURCC_FORMAT " @ %dx%d: %s", + GST_FOURCC_ARGS (*pixelformat), *width, *height, + g_strerror (errno))); + return FALSE; + } +invalid_dimensions: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Device '%s' cannot capture at %dx%d", + v4l2camsrc->videodev, *width, *height), + ("Tried to capture at %dx%d, but device returned size %dx%d", + *width, *height, format.fmt.pix.width, format.fmt.pix.height)); + return FALSE; + } +invalid_pixelformat: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, + ("Device '%s' cannot capture in the specified format", + v4l2camsrc->videodev), + ("Tried to capture in %" GST_FOURCC_FORMAT + ", but device returned format" " %" GST_FOURCC_FORMAT, + GST_FOURCC_ARGS (*pixelformat), + GST_FOURCC_ARGS (format.fmt.pix.pixelformat))); + return FALSE; + } +} + +static gboolean resolution_is_supported(int w, int h) +{ + return (w % 4) == 0 && (h % 4) == 0; +} + +#define probe_and_add_for_w_h(w, h) \ + do{ \ + GST_LOG(" w=%lu h=%lu",w,h); \ + tmp = gst_v4l2camsrc_probe_caps_for_format_and_size(v4l2camsrc, \ + pixelformat, w, h, template); \ + if(tmp) \ + res = g_list_prepend(res, tmp); \ + } while(0) + + +static GList *build_caps_list(GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 pixelformat, const GstStructure * template, + struct v4l2_frmsizeenum *size) +{ + GstStructure *tmp = NULL; + GList *res = NULL; + long wl,wh,hl,hh,ws,hs; + + wl = size->stepwise.min_width; wh = size->stepwise.max_width; ws = size->stepwise.step_width; + hl = size->stepwise.min_height; hh = size->stepwise.max_height; hs = size->stepwise.step_height; + + probe_and_add_for_w_h(wl, hl); + probe_and_add_for_w_h(wh, hh); + + tmp = gst_structure_copy(template); + gst_structure_set (tmp, + "width", GST_TYPE_INT_RANGE, wl, wh, + "height", GST_TYPE_INT_RANGE, hl, hh, + NULL); + res = g_list_prepend(res, tmp); + + return res; +} + + +/** + * gst_v4l2camsrc_probe_caps_for_format: + * @v4l2camsrc: #GstMFLDV4l2CamSrc object + * @pixelformat: pixel format fourcc + * @template: GstStructure template to be used for generating the caps + * + * Creates GstCaps object which describes the supported caps of the device. + * + * Returns: GStCaps containing supported video resolutions. 
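+ *
+ * Sizes are taken from VIDIOC_ENUM_FRAMESIZES where the driver supports
+ * it; otherwise a minimum/maximum range probed with
+ * gst_v4l2camsrc_get_nearest_size() is used as a fallback.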
+ */ +GstCaps * +gst_v4l2camsrc_probe_caps_for_format (GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 pixelformat, const GstStructure * template) +{ + GstCaps *ret = gst_caps_new_empty (); + gint min_w, max_w, min_h, max_h; + GstStructure *tmp; + +#ifdef VIDIOC_ENUM_FRAMESIZES + { + gint fd = v4l2camsrc->video_fd; + struct v4l2_frmsizeenum size; + GList *results = NULL; + gint w, h; + + memset (&size, 0, sizeof (struct v4l2_frmsizeenum)); + size.index = 0; + size.pixel_format = pixelformat; + + if (ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) < 0) + goto enum_framesizes_failed; + + if (size.type == V4L2_FRMSIZE_TYPE_DISCRETE) { + do { + w = MIN (size.discrete.width, G_MAXINT); + h = MIN (size.discrete.height, G_MAXINT); + + if(resolution_is_supported(w, h)) { + tmp = gst_v4l2camsrc_probe_caps_for_format_and_size (v4l2camsrc, + pixelformat, w, h, template); + + if (tmp) + results = g_list_prepend (results, tmp); + } + + size.index++; + } while (ioctl (fd, VIDIOC_ENUM_FRAMESIZES, &size) >= 0); + } else if(size.type == V4L2_FRMSIZE_TYPE_STEPWISE + || size.type == V4L2_FRMSIZE_TYPE_CONTINUOUS) { + results = build_caps_list(v4l2camsrc, pixelformat, template, &size); + } else { + goto unknown_type; + } + + /* Sort the resolutions from smallest to largest */ + results = g_list_sort (results, compare_resolutions); + + while (results != NULL) { + tmp = GST_STRUCTURE (results->data); + + gst_caps_merge_structure (ret, tmp); + + results = g_list_delete_link (results, results); + } + + if (gst_caps_is_empty (ret)) + goto enum_framesizes_no_results; + + return ret; + + /* ERRORS */ + enum_framesizes_failed: + { + /* I don't see how this is actually an error */ + GST_DEBUG_OBJECT (v4l2camsrc, + "Failed to enumerate frame sizes for pixelformat %" GST_FOURCC_FORMAT + " (%s)", GST_FOURCC_ARGS (pixelformat), g_strerror (errno)); + goto default_frame_sizes; + } + enum_framesizes_no_results: + { + /* it's possible that VIDIOC_ENUM_FRAMESIZES is defined but the driver in + * question doesn't actually support it yet */ + GST_DEBUG_OBJECT (v4l2camsrc, "No results for pixelformat %" + GST_FOURCC_FORMAT " enumerating frame sizes, trying fallback", + GST_FOURCC_ARGS (pixelformat)); + goto default_frame_sizes; + } + unknown_type: + { + GST_WARNING_OBJECT (v4l2camsrc, + "Unknown frame sizeenum type for pixelformat %" GST_FOURCC_FORMAT + ": %u", GST_FOURCC_ARGS (pixelformat), size.type); + goto default_frame_sizes; + } + } +#endif /* defined VIDIOC_ENUM_FRAMESIZES */ + +default_frame_sizes: + + min_w = min_h = 1; + max_w = max_h = GST_CAMERA_SRC_MAX_SIZE; + if (!gst_v4l2camsrc_get_nearest_size (v4l2camsrc, pixelformat, + &min_w, &min_h)) { + GST_WARNING_OBJECT (v4l2camsrc, + "Could not probe minimum capture size for pixelformat %" + GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); + } + if (!gst_v4l2camsrc_get_nearest_size (v4l2camsrc, pixelformat, + &max_w, &max_h)) { + GST_WARNING_OBJECT (v4l2camsrc, + "Could not probe maximum capture size for pixelformat %" + GST_FOURCC_FORMAT, GST_FOURCC_ARGS (pixelformat)); + } + + tmp = gst_structure_copy (template); + gst_structure_set (tmp, + "width", GST_TYPE_INT_RANGE, min_w, max_w, + "height", GST_TYPE_INT_RANGE, min_h, max_h, NULL); + gst_caps_append_structure (ret, tmp); + + return ret; +} + +/* Crop to upper left corner of NV12 frame. 
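The Y plane (in_w * in_h bytes) and the
+   interleaved CbCr plane (in_w * in_h / 2 bytes) are compacted row by row in
+   place, so ISP line padding can be stripped without a second buffer.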
*/ +static gboolean crop_buffer_inplace_nv12(unsigned char *buf, unsigned in_w, unsigned in_h, + unsigned out_w, unsigned out_h) +{ + unsigned rows; + unsigned char *in, *out; + + if(in_w < out_w || in_h < out_h + || in_w % 4 || out_w % 4 + || in_h % 4 || out_h % 4) + return FALSE; + + in = buf; out = buf; + for(rows = 0; rows < out_h; ++rows) { + memmove(out, in, out_w); + in += in_w; + out += out_w; + } + in = buf + in_w * in_h; + for(rows = 0; rows < out_h / 2; ++rows) { + memmove(out, in, out_w); + in += in_w; + out += out_w; + } +#if 0 + /* B/W Marker to top left corner */ +#define put_dot(x,y,c) buf[(y)*out_w+x] = c + put_dot(0,0,0xff); put_dot(1,0,0x00); put_dot(2,0,0xff); put_dot(3,0,0x00); put_dot(4,0,0xff); put_dot(5,0,0x00); put_dot(6,0,0xff); put_dot(7,0,0x00); + put_dot(0,1,0x00); put_dot(1,1,0xff); put_dot(2,1,0x00); put_dot(3,1,0xff); put_dot(4,1,0x00); put_dot(5,1,0xff); put_dot(6,1,0x00); put_dot(7,1,0xff); + put_dot(0,2,0xff); put_dot(1,2,0x00); + put_dot(0,3,0x00); put_dot(1,3,0xff); + put_dot(0,4,0xff); put_dot(1,4,0x00); + put_dot(0,5,0x00); put_dot(1,5,0xff); + put_dot(0,6,0xff); put_dot(1,6,0x00); + put_dot(0,7,0x00); put_dot(1,7,0xff); +#undef put_dot +#endif + + return TRUE; +} + +/** + * gst_v4l2camsrc_libmfld_grab_hq_frame: + * @v4l2camsrc: #GstMFLDV4l2CamSrc object + * @buf: Pointer to v4l2_buffer where to place the image frame + * + * Grab a High-Quality image by using libmfldcam. + * + * Returns: TRUE if capture succeeds, FALSE otherwise + */ +gboolean +gst_v4l2camsrc_libmfld_grab_hq_frame (GstMFLDV4l2CamSrc * v4l2camsrc, + struct v4l2_buffer * buffer) +{ + cam_err_t err; + gboolean ret; + struct cam_capture_settings st; + + st.mmap = v4l2camsrc->use_mmap; + st.ae = v4l2camsrc->ae_enabled; + st.af = v4l2camsrc->af_enabled; + st.awb = v4l2camsrc->awb_enabled; + st.dump_raw = v4l2camsrc->dump_raw; + st.raw_output_size = v4l2camsrc->raw_output_size; + st.dump_image = v4l2camsrc->dump_image; + + err = cam_capture_frames (v4l2camsrc->video_fd, buffer, &st); + if (v4l2camsrc->ae_enabled || v4l2camsrc->af_enabled || + v4l2camsrc->awb_enabled) + mfldcam_3a_start (); + ret = (err == CAM_ERR_NONE); + + return ret; +} + +/****************************************************** + * gst_v4l2camsrc_grab_frame (): + * grab a frame for capturing + * return value: GST_FLOW_OK, GST_FLOW_WRONG_STATE or GST_FLOW_ERROR + ******************************************************/ +GstFlowReturn +gst_v4l2camsrc_grab_frame (GstCameraSrc * camsrc, GstBuffer ** buf, + GstCameraCapturePhase phase) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + +#define NUM_TRIALS 5000 + struct v4l2_buffer buffer; + gint32 trials = NUM_TRIALS; + GstBuffer *pool_buffer; + gboolean need_copy = v4l2camsrc->use_copy; + gboolean variable_frame_size = FALSE; /* FIXME if we ever support JPEG etc */ + gint index; + gint ret; + unsigned real_w; + + /* wait if all buffers are DQBuf */ + g_mutex_lock (v4l2camsrc->device_mutex); + gst_v4l2_buffer_pool_update (v4l2camsrc->pool, v4l2camsrc); + g_mutex_unlock (v4l2camsrc->device_mutex); + + memset (&buffer, 0x00, sizeof (buffer)); + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + if (v4l2camsrc->use_mmap) + buffer.memory = V4L2_MEMORY_MMAP; + else + buffer.memory = V4L2_MEMORY_USERPTR; + + if (phase == GST_CAMERA_CAPTURE) { + gst_camerasrc_send_capture_start_message (camsrc); + } + + for (;;) { + ret = gst_poll_wait (v4l2camsrc->poll, V4L2CAMSRC_POLL_TIMEOUT); + if (G_UNLIKELY (ret < 0)) { + if (errno == EBUSY) + goto stopped; + if (errno != EAGAIN && errno 
!= EINTR) + goto select_error; + } + + if (G_UNLIKELY (ret == 0)) { + goto timeout; + } + g_mutex_lock (v4l2camsrc->device_mutex); + ret = gst_v4l2camsrc_libmfld_grab_hq_frame (v4l2camsrc, &buffer); + g_mutex_unlock (v4l2camsrc->device_mutex); + + if (ret >= 0) + break; + + GST_WARNING_OBJECT (v4l2camsrc, + "problem grabbing frame %d (ix=%d), trials=%d, pool-ct=%d, buf.flags=%d", + buffer.sequence, buffer.index, trials, + GST_MINI_OBJECT_REFCOUNT (v4l2camsrc->pool), buffer.flags); + + /* if the sync() got interrupted, we can retry */ + switch (errno) { + case EAGAIN: + GST_WARNING_OBJECT (v4l2camsrc, + "Non-blocking I/O has been selected using O_NONBLOCK and" + " no buffer was in the outgoing queue. device %s", + v4l2camsrc->videodev); + break; + case EINVAL: + goto einval; + case ENOMEM: + goto enomem; + case EIO: + GST_INFO_OBJECT (v4l2camsrc, + "VIDIOC_DQBUF failed due to an internal error." + " Can also indicate temporary problems like signal loss." + " Note the driver might dequeue an (empty) buffer despite" + " returning an error, or even stop capturing." + " device %s", v4l2camsrc->videodev); + /* have we de-queued a buffer ? */ + if (!(buffer.flags & (V4L2_BUF_FLAG_QUEUED | V4L2_BUF_FLAG_DONE))) { + /* this fails + if ((buffer.index >= 0) && (buffer.index < v4l2camsrc->breq.count)) { + GST_DEBUG_OBJECT (v4l2camsrc, "reenqueing buffer (ix=%ld)", buffer.index); + gst_v4l2camsrc_queue_frame (v4l2camsrc, buffer.index); + } + else { + */ + GST_DEBUG_OBJECT (v4l2camsrc, "reenqueing buffer"); + /* FIXME: this is not a good idea, as drivers usualy return the buffer + * with index-number set to 0, thus the re-enque will fail unless it + * was incidentialy 0. + * We could try to re-enque all buffers without handling the ioctl + * return. + */ + /* + if (ioctl (v4l2camsrc->video_fd, VIDIOC_QBUF, &buffer) < 0) { + goto qbuf_failed; + } + */ + /*} */ + } + break; + case EINTR: + GST_WARNING_OBJECT (v4l2camsrc, + "could not sync on a buffer on device %s", v4l2camsrc->videodev); + break; + default: + GST_WARNING_OBJECT (v4l2camsrc, + "Grabbing frame got interrupted on %s. No expected reason.", + v4l2camsrc->videodev); + break; + } + + /* check nr. 
of attempts to capture */ + if (--trials == -1) { + goto too_many_trials; + } else { + memset (&buffer, 0x00, sizeof (buffer)); + buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (v4l2camsrc->use_mmap) + buffer.memory = V4L2_MEMORY_MMAP; + else + buffer.memory = V4L2_MEMORY_USERPTR; + } + } + + if (phase == GST_CAMERA_CAPTURE) { + gst_camerasrc_send_capture_stop_message (camsrc); + } else { + /* Send the CAF status message */ + GstCameraFocusStatus focus_status; + gboolean lret; + lret = gst_v4l2camsrc_check_focus_status(v4l2camsrc, &focus_status, FALSE); + if (lret) { + GST_LOG_OBJECT (v4l2camsrc, "CAF status: %d", focus_status.status); + if (focus_status.status == GST_PHOTOGRAPHY_FOCUS_STATUS_SUCCESS ) + gst_camerasrc_send_af_status(GST_CAMERA_SRC (v4l2camsrc) ,CAMERASRC_AUTO_FOCUS_RESULT_FOCUSED); + else if (focus_status.status == GST_PHOTOGRAPHY_FOCUS_STATUS_FAIL ) + gst_camerasrc_send_af_status(GST_CAMERA_SRC (v4l2camsrc) ,CAMERASRC_AUTO_FOCUS_RESULT_FAILED); + else { + GST_WARNING_OBJECT (v4l2camsrc, "CAF status erronous: %d send as failed", focus_status.status); + gst_camerasrc_send_af_status(GST_CAMERA_SRC (v4l2camsrc) ,CAMERASRC_AUTO_FOCUS_RESULT_FAILED); + } + } + } + + + g_mutex_lock (v4l2camsrc->pool->lock); + + index = buffer.index; + + /* get our GstBuffer with that index from the pool, if the buffer is + * in use (NULL) we have a serious problem (should only be possible in mmap + * case). */ + pool_buffer = GST_BUFFER (v4l2camsrc->pool->buffers[index]); + + if (G_UNLIKELY (pool_buffer == NULL)) + goto no_buffer; + + GST_LOG_OBJECT (v4l2camsrc, "grabbed buffer %p at index %d (refct = %d)", + pool_buffer, index, GST_MINI_OBJECT_REFCOUNT (pool_buffer)); + + /* ref the buffer and requeue, when if becomes writable again */ + v4l2camsrc->pool->buffers[index] = NULL; + v4l2camsrc->pool->num_live_buffers++; + v4l2camsrc->pool->queued[index] = 0; + + /* SW workaround for ISP padding. */ + + if(v4l2camsrc->expected_capture_w < 320 && (v4l2camsrc->expected_capture_w % 32 != 0) + && v4l2camsrc->expected_capture_fourcc == V4L2_PIX_FMT_NV12 + && !v4l2camsrc->disable_low_res_crop) { + + real_w = (v4l2camsrc->expected_capture_w & ~31) + 32; + + GST_LOG_OBJECT(v4l2camsrc, "Downscaling padded buffer with cropping (%u,%u) -> (%u,%u)", + real_w, v4l2camsrc->expected_capture_h, v4l2camsrc->expected_capture_w, v4l2camsrc->expected_capture_h); + + if(!crop_buffer_inplace_nv12(GST_BUFFER_DATA(pool_buffer), + real_w, v4l2camsrc->expected_capture_h, + v4l2camsrc->expected_capture_w, v4l2camsrc->expected_capture_h)) { + GST_ERROR_OBJECT(v4l2camsrc,"Software crop failed"); + } + } + + g_mutex_unlock (v4l2camsrc->pool->lock); + + /* this can change at every frame, esp. with jpeg */ + if(variable_frame_size) + GST_BUFFER_SIZE (pool_buffer) = buffer.bytesused; + else + GST_BUFFER_SIZE (pool_buffer) = v4l2camsrc->frame_byte_size; + + GST_BUFFER_OFFSET (pool_buffer) = v4l2camsrc->offset++; + GST_BUFFER_OFFSET_END (pool_buffer) = v4l2camsrc->offset; + + /* timestamps, LOCK to get clock and base time. 
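The running time (clock time
+     minus base time) is then pushed back by one frame duration, since the
+     frame was captured roughly one frame interval before it is dequeued.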
*/ + { + GstClock *clock; + GstClockTime timestamp; + + GST_OBJECT_LOCK (v4l2camsrc); + if ((clock = GST_ELEMENT_CLOCK (v4l2camsrc))) { + /* we have a clock, get base time and ref clock */ + timestamp = GST_ELEMENT (v4l2camsrc)->base_time; + gst_object_ref (clock); + } else { + /* no clock, can't set timestamps */ + timestamp = GST_CLOCK_TIME_NONE; + } + GST_OBJECT_UNLOCK (v4l2camsrc); + + if (clock) { + GstClockTime latency; + + /* the time now is the time of the clock minus the base time */ + timestamp = gst_clock_get_time (clock) - timestamp; + gst_object_unref (clock); + + latency = + gst_util_uint64_scale_int (GST_SECOND, camsrc->fps_d, camsrc->fps_n); + + if (timestamp > latency) + timestamp -= latency; + else + timestamp = 0; + GST_BUFFER_DURATION (pool_buffer) = latency; + } + /* activate settings for next frame (Code from v4l2src) */ + if (GST_CLOCK_TIME_IS_VALID (camsrc->duration)) { + v4l2camsrc->ctrl_time += camsrc->duration; + } else { + v4l2camsrc->ctrl_time = timestamp; + } + gst_object_sync_values (G_OBJECT (camsrc), v4l2camsrc->ctrl_time); + + /* FIXME: use the timestamp from the buffer itself! */ + GST_BUFFER_TIMESTAMP (pool_buffer) = timestamp; + } + + if (G_UNLIKELY (need_copy)) { + GST_DEBUG_OBJECT (v4l2camsrc, "copying buffer"); + *buf = gst_buffer_copy (pool_buffer); + GST_BUFFER_FLAG_UNSET (*buf, GST_BUFFER_FLAG_READONLY); + /* this will requeue */ + gst_buffer_unref (pool_buffer); + } else { + if (v4l2camsrc->pool->is_vaapi_sharing) + GST_BUFFER_DATA(pool_buffer) = (GST_V4L2CAMSRC_BUFFER(pool_buffer))->gbuffer; + *buf = pool_buffer; + } + + GST_LOG_OBJECT (v4l2camsrc, + "grabbed frame %d (ix=%d), flags %08x, pool-ct=%d", buffer.sequence, + buffer.index, buffer.flags, v4l2camsrc->pool->num_live_buffers); + + return GST_FLOW_OK; + + /* ERRORS */ +select_error: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, (NULL), + ("select error %d: %s (%d)", ret, g_strerror (errno), errno)); + return GST_FLOW_ERROR; + } +stopped: + { + GST_DEBUG ("stop called"); + return GST_FLOW_WRONG_STATE; + } +einval: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, FAILED, + ("Failed trying to get video frames from device '%s'.", + v4l2camsrc->videodev), + ("The buffer type is not supported, or the index is out of bounds," + " or no buffers have been allocated yet, or the userptr" + " or length are invalid. device %s", v4l2camsrc->videodev)); + return GST_FLOW_ERROR; + } +enomem: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, FAILED, + ("Failed trying to get video frames from device '%s'. Not enough memory.", + v4l2camsrc->videodev), ("insufficient memory to enqueue a user " + "pointer buffer. device %s.", v4l2camsrc->videodev)); + return GST_FLOW_ERROR; + } +too_many_trials: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, FAILED, + ("Failed trying to get video frames from device '%s'.", + v4l2camsrc->videodev), + ("Failed after %d tries. device %s. system error: %s", + NUM_TRIALS, v4l2camsrc->videodev, g_strerror (errno))); + return GST_FLOW_ERROR; + } +no_buffer: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, FAILED, + ("Failed trying to get video frames from device '%s'.", + v4l2camsrc->videodev), + ("No free buffers found in the pool at index %d.", index)); + g_mutex_unlock (v4l2camsrc->pool->lock); + return GST_FLOW_ERROR; + } +/* +qbuf_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, WRITE, + ("Could not exchange data with device '%s'.", + v4l2camsrc->videodev), + ("Error queueing buffer on device %s. 
system error: %s", + v4l2camsrc->videodev, g_strerror (errno))); + return GST_FLOW_ERROR; + } +*/ +timeout: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, FAILED, + ("Timeout when trying to get video frames from device '%s'.", + v4l2camsrc->videodev), NULL); + return GST_FLOW_ERROR; + } +} + + +/****************************************************** + * gst_v4l2camsrc_set_capture(): + * set capture parameters for certain operation mode + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_set_capture (GstCameraSrc * camsrc, GstOperationMode mode, + gboolean try_only, guint32 * pixelformat, guint * width, guint * height, + guint * fps_n, guint * fps_d) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + cam_err_t err = CAM_ERR_NONE; + gboolean ret = TRUE; + + GST_DEBUG_OBJECT (v4l2camsrc, "mode = %d, try_only = %d", mode, try_only); + + v4l2camsrc->expected_capture_w = *width; + v4l2camsrc->expected_capture_h = *height; + v4l2camsrc->expected_capture_fourcc = *pixelformat; + + if (mode == GST_PHOTOGRAPHY_OPERATION_MODE_PREVIEW) { + return FALSE; + } else if (mode == GST_PHOTOGRAPHY_OPERATION_MODE_IMAGE_CAPTURE) { + GST_DEBUG_OBJECT (v4l2camsrc, "Image capture: %dx%d, format " + "%" GST_FOURCC_FORMAT, *width, *height, GST_FOURCC_ARGS (*pixelformat)); + + if (try_only) { + /* Round width & height down to be dividable by 8 */ + *width = (*width) & ~7; + *height = (*height) & ~7; + ret = TRUE; + } else { + ret = gst_v4l2camsrc_configure_device (v4l2camsrc, pixelformat, + width, height, fps_n, fps_d); + + v4l2camsrc->tmp_num_buffers = v4l2camsrc->num_buffers; + v4l2camsrc->num_buffers = 1; + + v4l2camsrc->capture_w = *width; + v4l2camsrc->capture_h = *height; + if (fps_n && fps_d) { + v4l2camsrc->capture_fps_n = *fps_n; + v4l2camsrc->capture_fps_d = *fps_d; + } + v4l2camsrc->capture_fourcc = *pixelformat; + } + } else { + /* VIEWFINDER mode */ + /* FIXME: handle try_only mode */ + + v4l2camsrc->num_buffers = v4l2camsrc->tmp_num_buffers; + + ret = gst_v4l2camsrc_configure_device (v4l2camsrc, pixelformat, + width, height, fps_n, fps_d); + + v4l2camsrc->vf_w = *width; + v4l2camsrc->vf_h = *height; + v4l2camsrc->vf_fourcc = *pixelformat; + if (fps_n && fps_d) { + v4l2camsrc->vf_fps_n = *fps_n; + v4l2camsrc->vf_fps_d = *fps_d; + } + } + if (err != CAM_ERR_NONE) + ret = FALSE; + + return ret; +} + +/****************************************************** + * gst_v4l2camsrc_capture_start(): + * Start capturing frames from the device + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_capture_start (GstCameraSrc * camsrc, GstCaps * caps) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + gint fd = v4l2camsrc->video_fd; + struct v4l2_requestbuffers breq; + gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + cam_capture_mode_t cam_mode; + cam_err_t err; + + v4l2camsrc->offset = 0; + + /* activate settings for first frame */ + v4l2camsrc->ctrl_time = 0; + gst_object_sync_values (G_OBJECT (camsrc), v4l2camsrc->ctrl_time); + + memset (&breq, 0, sizeof (struct v4l2_requestbuffers)); + + GST_DEBUG_OBJECT (v4l2camsrc, "initializing the capture system"); + + GST_V4L2CAMSRC_CHECK_OPEN (v4l2camsrc); + GST_V4L2CAMSRC_CHECK_NOT_ACTIVE (v4l2camsrc); + + if (!(v4l2camsrc->vcap.capabilities & V4L2_CAP_STREAMING)) + goto no_capture_method; + + GST_DEBUG_OBJECT (v4l2camsrc, "STREAMING, requesting %d CAPTURE buffers", + v4l2camsrc->num_buffers); 
+ + err = cam_driver_set_mipi_interrupt(fd,1); + if (err != CAM_ERR_NONE) + GST_DEBUG_OBJECT (v4l2camsrc, "MIPI interrupt enable FAILED !"); + + breq.count = v4l2camsrc->num_buffers; + breq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if (v4l2camsrc->use_mmap) + breq.memory = V4L2_MEMORY_MMAP; + else + breq.memory = V4L2_MEMORY_USERPTR; + + if (ioctl (fd, VIDIOC_REQBUFS, &breq) < 0) + goto reqbufs_failed; + + GST_LOG_OBJECT (v4l2camsrc, " count: %u", breq.count); + GST_LOG_OBJECT (v4l2camsrc, " type: %d", breq.type); + GST_LOG_OBJECT (v4l2camsrc, " memory: %d", breq.memory); + + if (breq.count < GST_V4L2CAMSRC_MIN_BUFFERS) + goto no_buffers; + + if (v4l2camsrc->num_buffers != breq.count) { + GST_WARNING_OBJECT (v4l2camsrc, "using %u buffers instead", breq.count); + v4l2camsrc->num_buffers = breq.count; + g_object_notify (G_OBJECT (v4l2camsrc), "queue-size"); + } + + /* Map the buffers */ + GST_LOG_OBJECT (v4l2camsrc, "initiating buffer pool"); + + if (!(v4l2camsrc->pool = + gst_v4l2camsrc_buffer_pool_new (v4l2camsrc, fd, caps))) + goto buffer_pool_new_failed; + + GST_INFO_OBJECT (v4l2camsrc, "capturing buffers"); + + GST_V4L2CAMSRC_SET_ACTIVE (v4l2camsrc); + + GST_DEBUG_OBJECT (v4l2camsrc, "starting the capturing"); + GST_V4L2CAMSRC_CHECK_ACTIVE (v4l2camsrc); + + if (!gst_v4l2camsrc_buffer_pool_activate (v4l2camsrc->pool, v4l2camsrc)) + goto pool_activate_failed; + + if (ioctl (fd, VIDIOC_STREAMON, &type) < 0) + goto streamon_failed; + + v4l2camsrc->is_active = TRUE; + GST_DEBUG_OBJECT (v4l2camsrc, "STREAMON called"); + + /* Update the focus only when the Sensor is stream on */ + if (v4l2camsrc->focus_updated) { + cam_driver_set_focus_posi(v4l2camsrc->video_fd, v4l2camsrc->focus_posi); + v4l2camsrc->focus_updated = FALSE; + } + + if (v4l2camsrc->zoom_updated) { + gst_v4l2camsrc_libmfldcam_set_zoom(v4l2camsrc, v4l2camsrc->zoom_factor); + v4l2camsrc->zoom_updated = FALSE; + } + + cam_mode = find_item (gst_v4l2camsrc_capture_map,v4l2camsrc->capture_mode); + cam_set_capture_mode (v4l2camsrc->video_fd, cam_mode); + + return TRUE; + + /* ERRORS */ +reqbufs_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, + ("Could not get buffers from device '%s'.", + v4l2camsrc->videodev), + ("error requesting %d buffers: %s", + v4l2camsrc->num_buffers, g_strerror (errno))); + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +no_buffers: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, + ("Could not get enough buffers from device '%s'.", + v4l2camsrc->videodev), + ("we received %d from device '%s', we want at least %d", + breq.count, v4l2camsrc->videodev, GST_V4L2CAMSRC_MIN_BUFFERS)); + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +buffer_pool_new_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, + ("Could not map buffers from device '%s'", + v4l2camsrc->videodev), + ("Failed to create buffer pool: %s", g_strerror (errno))); + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +no_capture_method: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, READ, + ("The driver of device '%s' does not support streaming. 
", + v4l2camsrc->videodev), (NULL)); + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +pool_activate_failed: + { + /* already errored */ + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +streamon_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, OPEN_READ, + ("Error starting streaming capture from device '%s'.", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + cam_driver_set_mipi_interrupt(fd, 0); + return FALSE; + } +} + +/****************************************************** + * gst_v4l2camsrc_capture_stop(): + * stop streaming capture + * return value: TRUE on success, FALSE on error + ******************************************************/ +gboolean +gst_v4l2camsrc_capture_stop (GstCameraSrc * camsrc) +{ + GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc); + struct v4l2_requestbuffers breq; + gint fd = v4l2camsrc->video_fd; + cam_err_t err; + + gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + memset(&breq, 0, sizeof(struct v4l2_requestbuffers)); + + GST_DEBUG_OBJECT (v4l2camsrc, "stopping capturing"); + + if (!GST_V4L2CAMSRC_IS_OPEN (v4l2camsrc)) { + return TRUE; + } + if (!GST_V4L2CAMSRC_IS_ACTIVE (v4l2camsrc)) { + return TRUE; + } + + err = cam_driver_set_mipi_interrupt(fd,0); + if (err != CAM_ERR_NONE) + GST_DEBUG_OBJECT (v4l2camsrc, "MIPI interrupt disable FAILED !"); + + /* we actually need to sync on all queued buffers but not + * on the non-queued ones */ + if (ioctl (v4l2camsrc->video_fd, VIDIOC_STREAMOFF, &type) < 0) + goto streamoff_failed; + + GST_DEBUG_OBJECT (v4l2camsrc, "STREAMOFF called"); + + breq.count = 0; + breq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + if(v4l2camsrc->use_mmap) + breq.memory = V4L2_MEMORY_MMAP; + else + breq.memory = V4L2_MEMORY_USERPTR; + + GST_DEBUG_OBJECT (v4l2camsrc, "REQUEST 0 to finalize buffer"); + + if(ioctl(v4l2camsrc->video_fd, VIDIOC_REQBUFS, &breq) < 0) + GST_DEBUG_OBJECT (v4l2camsrc, "Failed to REQUEST 0 to finalize buffer"); + + + if (v4l2camsrc->pool) { + gst_v4l2camsrc_buffer_pool_destroy (v4l2camsrc->pool, v4l2camsrc); + v4l2camsrc->pool = NULL; + } + + GST_V4L2CAMSRC_SET_INACTIVE (v4l2camsrc); + v4l2camsrc->is_active = FALSE; + mfldcam_3a_stop (); + + return TRUE; + + /* ERRORS */ +streamoff_failed: + { + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, CLOSE, + ("Error stopping streaming capture from device '%s'.", + v4l2camsrc->videodev), GST_ERROR_SYSTEM); + return FALSE; + } +} + +/* + */ +static gboolean +gst_v4l2camsrc_get_nearest_size (GstMFLDV4l2CamSrc * v4l2camsrc, + guint32 pixelformat, gint * width, gint * height) +{ + struct v4l2_format fmt; + int fd; + int r; + + g_return_val_if_fail (width != NULL, FALSE); + g_return_val_if_fail (height != NULL, FALSE); + + GST_LOG_OBJECT (v4l2camsrc, + "getting nearest size to %dx%d with format %" GST_FOURCC_FORMAT, + *width, *height, GST_FOURCC_ARGS (pixelformat)); + + fd = v4l2camsrc->video_fd; + + /* get size delimiters */ + memset (&fmt, 0, sizeof (fmt)); + fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + fmt.fmt.pix.width = *width; + fmt.fmt.pix.height = *height; + fmt.fmt.pix.pixelformat = pixelformat; + fmt.fmt.pix.field = V4L2_FIELD_INTERLACED; + +/* FIXME: Disable TRY_FMT check, driver is not returning anything useful yet */ +#if 0 + r = ioctl (fd, VIDIOC_TRY_FMT, &fmt); + if (r < 0 && errno == EINVAL) { + /* try again with progressive video */ + fmt.fmt.pix.width = *width; + fmt.fmt.pix.height = *height; + fmt.fmt.pix.pixelformat = pixelformat; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + r = ioctl (fd, VIDIOC_TRY_FMT, &fmt); + } + + if (r < 0) { + /* The driver might 
not implement TRY_FMT, in which case we will try + S_FMT to probe */ + if (errno != ENOTTY) + return FALSE; +#endif + + /* Only try S_FMT if we're not actively capturing yet, which we shouldn't + be, because we're still probing */ + if (GST_V4L2CAMSRC_IS_ACTIVE (v4l2camsrc)) + return FALSE; + +#if 0 + GST_LOG_OBJECT (v4l2camsrc, + "Failed to probe size limit with VIDIOC_TRY_FMT, trying VIDIOC_S_FMT"); +#endif + + fmt.fmt.pix.width = *width; + fmt.fmt.pix.height = *height; + + r = ioctl (fd, VIDIOC_S_FMT, &fmt); + if (r < 0 && errno == EINVAL) { + /* try again with progressive video */ + fmt.fmt.pix.width = *width; + fmt.fmt.pix.height = *height; + fmt.fmt.pix.pixelformat = pixelformat; + fmt.fmt.pix.field = V4L2_FIELD_NONE; + r = ioctl (fd, VIDIOC_S_FMT, &fmt); + } + + if (r < 0) + return FALSE; +#if 0 + } +#endif + + GST_LOG_OBJECT (v4l2camsrc, + "got nearest size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height); + + *width = fmt.fmt.pix.width; + *height = fmt.fmt.pix.height; + + return TRUE; +} + + +/****************************************************** + * gst_v4l2camsrc_set_crop(): + * set cropping bounds + * return value: TRUE on success, FALSE on error + ******************************************************/ +static gboolean +gst_v4l2camsrc_set_crop (GstMFLDV4l2CamSrc * v4l2camsrc, + gint x, gint y, gint w, gint h) +{ + gboolean ret = FALSE; + struct v4l2_crop scrop; + struct v4l2_crop gcrop; + gint cx, cy, cw, ch; + int fd; + + fd = v4l2camsrc->video_fd; + cx = v4l2camsrc->vcrop.bounds.left; + cy = v4l2camsrc->vcrop.bounds.top; + cw = v4l2camsrc->vcrop.bounds.width; + ch = v4l2camsrc->vcrop.bounds.height; + + memset (&scrop, 0, sizeof (scrop)); + scrop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + + /* Make sure that cropping area stays inside the crop boundaries */ + scrop.c.left = x > cx ? x : cx; + scrop.c.top = y > cy ? y : cy; + scrop.c.width = (scrop.c.left + w) < (cx + cw) ? w : cx + cw - scrop.c.left; + scrop.c.height = (scrop.c.top + h) < (cy + ch) ? h : cy + ch - scrop.c.top; + + GST_DEBUG_OBJECT (v4l2camsrc, + "Crop request: left = %d, top = %d, w = %d, h = %d", + scrop.c.left, scrop.c.top, scrop.c.width, scrop.c.height); + + g_mutex_lock (v4l2camsrc->device_mutex); + + if (-1 == ioctl (fd, VIDIOC_S_CROP, &scrop) && errno != EINVAL) { + goto s_crop_failed; + } + + if (-1 == ioctl (fd, VIDIOC_G_CROP, &gcrop)) { + goto g_crop_failed; + } + + g_mutex_unlock (v4l2camsrc->device_mutex); + + GST_DEBUG_OBJECT (v4l2camsrc, + "Crop selected: left = %d, top = %d, w = %d, h = %d", + gcrop.c.left, gcrop.c.top, gcrop.c.width, gcrop.c.height); + +#if 0 + if (gcrop.c.left != scrop.c.left || gcrop.c.top != scrop.c.top || + gcrop.c.width != scrop.c.width || gcrop.c.height != scrop.c.height) { + goto crop_not_supported; + } +#endif + + ret = TRUE; + +done: + + return ret; + +/* ERRORS */ +s_crop_failed: + g_mutex_unlock (v4l2camsrc->device_mutex); + GST_DEBUG_OBJECT (v4l2camsrc, "VIDIOC_S_CROP not supported"); + goto done; + +g_crop_failed: + g_mutex_unlock (v4l2camsrc->device_mutex); + GST_DEBUG_OBJECT (v4l2camsrc, "VIDIOC_G_CROP not supported"); + goto done; + +#if 0 +crop_not_supported: + g_mutex_unlock (v4l2camsrc->device_mutex); + GST_DEBUG_OBJECT (v4l2camsrc, "Given crop value not accepted"); + goto done; +#endif +} + + +/****************************************************** + * gst_v4l2camsrc_update_cropping(): + * update cropping area according to width, height and zoom factors. 
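+ * The crop rectangle is derived from the sensor default rectangle,
+ * scaled by 1/zoom and centred, keeping roughly the aspect ratio of the
+ * requested size, before being passed to gst_v4l2camsrc_set_crop().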
+ * return value: TRUE on success, FALSE on error
+ ******************************************************/
+static gboolean
+gst_v4l2camsrc_update_cropping (GstMFLDV4l2CamSrc * v4l2camsrc, gint width,
+    gint height, gfloat zoom)
+{
+  gfloat sensor_ar, reso_ar;
+  gint crop_x, crop_y, crop_w, crop_h;
+
+  g_return_val_if_fail (width != 0, FALSE);
+  g_return_val_if_fail (height != 0, FALSE);
+  g_return_val_if_fail (zoom != 0, FALSE);
+
+  if (zoom < 1.0 || zoom > v4l2camsrc->max_zoom_factor) {
+    GST_DEBUG_OBJECT (v4l2camsrc, "invalid zoom = %.2f", zoom);
+    return FALSE;
+  }
+  if (!v4l2camsrc->crop_supported) {
+    GST_DEBUG_OBJECT (v4l2camsrc, "crop not supported");
+    return FALSE;
+  }
+
+  sensor_ar = (gfloat) v4l2camsrc->vcrop.defrect.width /
+      (gfloat) v4l2camsrc->vcrop.defrect.height;
+  reso_ar = (gfloat) width / (gfloat) height;
+
+  if (sensor_ar > reso_ar) {
+    crop_w = (width * v4l2camsrc->vcrop.defrect.height / height) / zoom;
+    crop_h = MAX (v4l2camsrc->vcrop.defrect.height, height) / zoom;
+  } else {
+    crop_w = MAX (v4l2camsrc->vcrop.defrect.width, width) / zoom;
+    crop_h = (height * v4l2camsrc->vcrop.defrect.width / width) / zoom;
+  }
+
+  crop_x = ABS ((v4l2camsrc->vcrop.defrect.width - crop_w) / 2);
+  crop_y = ABS ((v4l2camsrc->vcrop.defrect.height - crop_h) / 2);
+
+  GST_LOG_OBJECT (v4l2camsrc, "set cropping: x: %d, y: %d, w: %d, h: %d",
+      crop_x, crop_y, crop_w, crop_h);
+
+  return gst_v4l2camsrc_set_crop (v4l2camsrc, crop_x, crop_y, crop_w, crop_h);
+}
+
+/***************************************************************************
+ * New MFLD Camera Code ****************************************************
+ * *************************************************************************/
+
+/* These map tables help to adapt the general photography interface to
+ * different device implementations.
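The zoom handling above derives a centered crop window from the default (full-sensor) rectangle; the aspect ratios have to be computed in floating point. As an illustration only, the same computation as a standalone helper with generic names (not part of the patch):

/* Illustrative only: derive a centered crop window for a digital zoom
 * factor, keeping the arithmetic in floating point throughout. */
static void
zoom_to_crop_sketch (int def_w, int def_h, int out_w, int out_h, float zoom,
    int *crop_x, int *crop_y, int *crop_w, int *crop_h)
{
  float sensor_ar = (float) def_w / (float) def_h;
  float reso_ar = (float) out_w / (float) out_h;

  if (sensor_ar > reso_ar) {
    /* sensor is wider than the output: height limits the crop */
    *crop_w = (int) ((float) out_w * def_h / out_h / zoom);
    *crop_h = (int) ((def_h > out_h ? def_h : out_h) / zoom);
  } else {
    *crop_w = (int) ((def_w > out_w ? def_w : out_w) / zoom);
    *crop_h = (int) ((float) out_h * def_w / out_w / zoom);
  }

  /* center the window inside the default rectangle */
  *crop_x = (def_w - *crop_w) / 2;
  *crop_y = (def_h - *crop_h) / 2;
  if (*crop_x < 0) *crop_x = -*crop_x;
  if (*crop_y < 0) *crop_y = -*crop_y;
}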
+ * + * Table index = Enumeration value from GstPhotography + * Table value = Device/Implementation -specific setting value + */ + +static const gint gst_v4l2camsrc_scene_map[] = { + 999, /* GST_PHOTOGRAPHY_SCENE_MODE_MANUAL */ + CAM_GENERAL_SCENE_MODE_CLOSEUP, + CAM_GENERAL_SCENE_MODE_PORTRAIT, + CAM_GENERAL_SCENE_MODE_LANDSCAPE, + CAM_GENERAL_SCENE_MODE_SPORT, + CAM_GENERAL_SCENE_MODE_NIGHT, + CAM_GENERAL_SCENE_MODE_AUTO, + -1 +}; + +static const gint gst_v4l2camsrc_flash_map[] = { + CAM_LIGHT_FLASH_MODE_AUTO, + CAM_LIGHT_FLASH_MODE_OFF, + CAM_LIGHT_FLASH_MODE_ON, + CAM_LIGHT_FLASH_MODE_FILL_IN, + CAM_LIGHT_FLASH_MODE_RED_EYE, + -1 +}; + +static const gint gst_v4l2camsrc_focus_map[] = { + CAM_FOCUS_MODE_AUTO, /* GST_PHOTOGRAPHY_FOCUS_MODE_AUTO = 0 */ + CAM_FOCUS_MODE_MACRO, /* GST_PHOTOGRAPHY_FOCUS_MODE_MACRO */ + CAM_FOCUS_MODE_TOUCH_AUTO, /* GST_PHOTOGRAPHY_FOCUS_MODE_PORTRAIT */ + CAM_FOCUS_MODE_FULL, /* GST_PHOTOGRAPHY_FOCUS_MODE_INFINITY */ + CAM_FOCUS_MODE_AUTO, /* GST_PHOTOGRAPHY_FOCUS_MODE_HYPERFOCAL */ + CAM_FOCUS_MODE_FULL, /* GST_PHOTOGRAPHY_FOCUS_MODE_EXTENDED */ + CAM_FOCUS_MODE_CONTINUOUS, /* GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_NORMAL */ + CAM_FOCUS_MODE_CONTINUOUS, /* GST_PHOTOGRAPHY_FOCUS_MODE_CONTINUOUS_EXTENDED */ + -1 +}; + +static const gint gst_v4l2camsrc_flicker_map[] = { + CAM_GENERAL_FLICKER_REDUCTION_MODE_OFF, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_OFF = 0 */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_50HZ */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_60HZ, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_60HZ */ + CAM_GENERAL_FLICKER_REDUCTION_MODE_AUTO, /* GST_PHOTOGRAPHY_FLICKER_REDUCTION_AUTO */ + -1 +}; + +static const gint gst_v4l2camsrc_iso_map[][2] = { + {CAM_EXPOSURE_ISO_VALUE_1600, 1600}, + {CAM_EXPOSURE_ISO_VALUE_800, 800}, + {CAM_EXPOSURE_ISO_VALUE_400, 400}, + {CAM_EXPOSURE_ISO_VALUE_200, 200}, + {CAM_EXPOSURE_ISO_VALUE_100, 100}, + {CAM_EXPOSURE_ISO_VALUE_AUTO, 0} +}; + +static const gint gst_v4l2camsrc_focus_status_map[][2] = +{ + { GST_PHOTOGRAPHY_FOCUS_STATUS_NONE, CAM_FOCUS_STATUS_IDLE }, + { GST_PHOTOGRAPHY_FOCUS_STATUS_RUNNING, CAM_FOCUS_STATUS_RUNNING }, + { GST_PHOTOGRAPHY_FOCUS_STATUS_SUCCESS, CAM_FOCUS_STATUS_SUCCESS }, + { GST_PHOTOGRAPHY_FOCUS_STATUS_FAIL, CAM_FOCUS_STATUS_FAIL }, + { -1, -1 } +}; + +static const gchar *cameralib_error_map[] = { + "CAM_ERR_NONE", + "CAM_ERR_PARAM", + "CAM_ERR_UNSUPP", + "CAM_ERR_HW", + "CAM_ERR_SYS", + "CAM_ERR_LEXIT", + "CAM_ERR_DEPRECATED", + "CAM_ERR_INVALID_STATE", + "CAM_ERR_INTERNAL", + "CAM_ERR_NOT_OPEN", + "CAM_ERR_3A" +}; + +/* + * + */ +static cam_exposure_iso_value_t +gst_v4l2camsrc_libmfld_map_iso (guint iso) +{ + guint i = 0; + + while (gst_v4l2camsrc_iso_map[i][1] != 0) { + if (iso >= gst_v4l2camsrc_iso_map[i][1]) { + break; + } + i++; + } + + return gst_v4l2camsrc_iso_map[i][1]; +} + + +/* + * + */ +static guint +gst_v4l2camsrc_libmfld_reverse_map_iso (cam_exposure_iso_value_t iso) +{ + guint i = 0; + + while (gst_v4l2camsrc_iso_map[i][1] != 0) { + if (iso == gst_v4l2camsrc_iso_map[i][0]) { + break; + } + i++; + } + + return gst_v4l2camsrc_iso_map[i][1]; +} + +static const gint gst_v4l2camsrc_shakerisk_map[] = { + GST_PHOTOGRAPHY_SHAKE_RISK_LOW, /* CAM_EXPOSURE_SHAKE_RISK_LOW = 0 */ + GST_PHOTOGRAPHY_SHAKE_RISK_MEDIUM, /* CAM_EXPOSURE_SHAKE_RISK_MEDIUM */ + GST_PHOTOGRAPHY_SHAKE_RISK_HIGH, /* CAM_EXPOSURE_SHAKE_RISK_HIGH */ +}; + +/* + * Return index of the item in the table. 
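The two-column ISO table above pairs libmfldcam enum values with numeric ISO speeds, terminated by the "auto" row whose numeric value is 0. A generic lookup over a table of that shape could look like the sketch below; it is illustrative only, returns the enum (first) column, and is not one of the helpers defined in this file.

/* Illustrative only: pick the row whose numeric speed (second column) is the
 * highest value not above the requested ISO, falling back to the terminating
 * auto row, and return that row's enum (first) column. */
static int
iso_to_enum_sketch (const int map[][2], int requested_iso)
{
  int i = 0;

  while (map[i][1] != 0 && requested_iso < map[i][1])
    i++;
  return map[i][0];
}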
+ */ +static gint +find_item (const gint table[], const gint item) +{ + guint i = 0; + + while (table[i] != -1) { + if (table[i] == item) + return i; + i++; + } + return 0; +} + +gboolean +gst_v4l2camsrc_read_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gint32 wbmode, effect, scene, flash, focus; + gint32 flicker; + gint32 ev, iso, aperture, exposure, noise_reduction; + cam_err_t err; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + g_mutex_lock (v4l2camsrc->device_mutex); + + if (!v4l2camsrc->initialized) { + GST_DEBUG ("Device not initialized"); + g_mutex_unlock (v4l2camsrc->device_mutex); + return FALSE; + } + + GST_DEBUG ("Retrieving settings from camera"); + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_AWB_MODE, &wbmode); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_AWB_MODE query failed: %s", cameralib_error_map[err]); + goto error; + } + + err = + cam_feature_get (v4l2camsrc->video_fd, CAM_GENERAL_EFFECT_TYPE, &effect); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_GENERAL_EFFECT_TYPE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_GENERAL_SCENE_MODE, &scene); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_GENERAL_SCENE_MODE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_LIGHT_FLASH_MODE, &flash); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_LIGHT_FLASH_MODE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_EXPOSURE_COMPENSATION, &ev); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_EXPOSURE_COMPENSATION query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_EXPOSURE_ISO_VALUE, &iso); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_EXPOSURE_ISO_VALUE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_EXPOSURE_MANUAL_APERTURE, + &aperture); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_EXPOSURE_MANUAL_APERTURE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = + cam_feature_get (v4l2camsrc->video_fd, CAM_EXPOSURE_MANUAL_TIME, + &exposure); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_EXPOSURE_MANUAL_TIME query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = cam_feature_get (v4l2camsrc->video_fd, CAM_FOCUS_MODE, &focus); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_FOCUS_MODE query failed: %s", cameralib_error_map[err]); + goto error; + } + + err = + cam_feature_get (v4l2camsrc->video_fd, CAM_GENERAL_FLICKER_REDUCTION_MODE, + &flicker); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_GENERAL_FLICKER_REDUCTION_MODE query failed: %s", + cameralib_error_map[err]); + goto error; + } + + err = + cam_feature_get (v4l2camsrc->video_fd, CAM_NOISE_REDUCTION_MODE, + &noise_reduction); + if (err != CAM_ERR_NONE) { + GST_DEBUG ("CAM_NOISE_REDUCTION query failed: %s", + cameralib_error_map[err]); + goto error; + } + + photoconf->wb_mode = find_item (gst_v4l2camsrc_wb_map, wbmode); + photoconf->tone_mode = find_item (gst_v4l2camsrc_effect_map, effect); + photoconf->scene_mode = find_item (gst_v4l2camsrc_scene_map, scene); + photoconf->flash_mode = find_item (gst_v4l2camsrc_flash_map, flash); + photoconf->focus_mode = find_item (gst_v4l2camsrc_focus_map, focus); + photoconf->flicker_mode = find_item (gst_v4l2camsrc_flicker_map, flicker); + + 
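gst_v4l2camsrc_read_settings() above issues one cam_feature_get() call per feature with identical error handling; the same pattern can be table-driven. The sketch below is illustrative only: the cam_feature_get() signature and the error map are assumed from the surrounding calls, and the struct and helper names are invented for the example.

/* Illustrative only: query a list of libmfldcam features in one loop. */
typedef struct {
  gint id;              /* CAM_* feature identifier */
  gint32 *value;        /* where to store the queried value */
  const gchar *name;    /* label used in the debug message */
} FeatureQuerySketch;

static gboolean
query_features_sketch (gint fd, const FeatureQuerySketch * q, guint n)
{
  guint i;

  for (i = 0; i < n; i++) {
    cam_err_t err = cam_feature_get (fd, q[i].id, q[i].value);

    if (err != CAM_ERR_NONE) {
      GST_DEBUG ("%s query failed: %s", q[i].name, cameralib_error_map[err]);
      return FALSE;
    }
  }
  return TRUE;
}

A caller would then fill a small array pairing CAM_AWB_MODE with &wbmode, CAM_GENERAL_EFFECT_TYPE with &effect, and so on, and make a single call under the device mutex.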
photoconf->ev_compensation = (gfloat) ev / 10;
+  photoconf->iso_speed = gst_v4l2camsrc_libmfld_map_iso (iso);
+  photoconf->aperture = aperture;
+  photoconf->exposure = exposure;
+  photoconf->noise_reduction = noise_reduction;
+
+  GST_DEBUG ("Scene mode ext: %d, lib: %d", photoconf->scene_mode, scene);
+  GST_DEBUG ("Focus mode ext: %d, lib: %d", photoconf->focus_mode, focus);
+  GST_DEBUG ("Flash mode ext: %d, lib: %d", photoconf->flash_mode, flash);
+  GST_DEBUG ("ISO: %d, EV_comp: %f (%d)", iso, photoconf->ev_compensation, ev);
+
+  g_mutex_unlock (v4l2camsrc->device_mutex);
+
+  return TRUE;
+
+/* ERRORS */
+error:
+  {
+    g_mutex_unlock (v4l2camsrc->device_mutex);
+    return FALSE;
+  }
+}
+
+gboolean
+gst_v4l2camsrc_read_exif (GstCameraSrc * camsrc,
+    GstCameraControlExifInfo *exif_info)
+{
+  GstMFLDV4l2CamSrc *v4l2camsrc;
+  cam_err_t err;
+  struct atomisp_makernote_info maker_note;
+  unsigned char *buf;
+
+  v4l2camsrc = GST_V4L2CAMSRC (camsrc);
+
+  if (!v4l2camsrc->initialized) {
+    GST_DEBUG ("Device not initialized");
+    return FALSE;
+  }
+  buf = (unsigned char *) &maker_note;
+
+  GST_DEBUG ("Retrieving makernote from atomisp");
+  err = cam_get_makernote (v4l2camsrc->video_fd, buf, 0);
+
+  if (err)
+    goto error;
+
+  /* focal length */
+  exif_info->focal_len_numerator = maker_note.focal_length >> 16;
+  exif_info->focal_len_denominator = maker_note.focal_length & 0xFFFF;
+  /* Aperture value (f_num) */
+  exif_info->aperture_f_num_numerator = maker_note.f_number_curr >> 16;
+  exif_info->aperture_f_num_denominator = maker_note.f_number_curr & 0xFFFF;
+  /* isp major, minor, patch level */
+  exif_info->software_used = v4l2camsrc->vcap.version;
+  exif_info->colorspace = 65535;        /* Uncalibrated (=65535) */
+  exif_info->metering_mode = 1;         /* Average */
+  cam_feature_get (v4l2camsrc->video_fd, CAM_EXPOSURE_ISO_VALUE, &exif_info->iso);
+
+  /* TODO: complete exif info */
+  exif_info->exposure_time_denominator = 0;
+  exif_info->exposure_time_numerator = 0;
+
+  exif_info->shutter_speed_numerator = 0;
+  exif_info->shutter_speed_denominator = 0;
+  exif_info->brigtness_numerator = 0;
+  exif_info->brightness_denominator = 0;
+  exif_info->flash = cam_is_flash_used ();
+  GST_DEBUG ("Focal length numerator : %d", exif_info->focal_len_numerator);
+  GST_DEBUG ("Focal length denominator : %d", exif_info->focal_len_denominator);
+  GST_DEBUG ("F num numerator : %d", exif_info->aperture_f_num_numerator);
+  GST_DEBUG ("F num denominator : %d", exif_info->aperture_f_num_denominator);
+  GST_DEBUG ("flash used : %d", exif_info->flash);
+
+  return TRUE;
+
+/* ERRORS */
+error:
+  {
+    /* device_mutex is not held in this function */
+    return FALSE;
+  }
+}
+
+gboolean
+gst_v4l2camsrc_set_flash_mode (GstCameraSrc * camsrc,
+    int value)
+{
+  GstMFLDV4l2CamSrc *v4l2camsrc;
+  cam_err_t err;
+  gint32 flash;
+
+  v4l2camsrc = GST_V4L2CAMSRC (camsrc);
+
+  g_mutex_lock (v4l2camsrc->device_mutex);
+
+  if (!v4l2camsrc->initialized) {
+    GST_DEBUG ("Device not initialized");
+    g_mutex_unlock (v4l2camsrc->device_mutex);
+    return FALSE;
+  }
+
+  switch (value) {
+    case GST_CAMERA_STROBE_CAP_NONE:
+    case GST_CAMERA_STROBE_CAP_OFF:
+      flash = CAM_LIGHT_FLASH_MODE_OFF;
+      break;
+    case GST_CAMERA_STROBE_CAP_ON:
+      flash = CAM_LIGHT_FLASH_MODE_ON;
+      break;
+    case GST_CAMERA_STROBE_CAP_AUTO:
+      flash = CAM_LIGHT_FLASH_MODE_AUTO;
+      break;
+    case GST_CAMERA_STROBE_CAP_REDEYE:
+      flash = CAM_LIGHT_FLASH_MODE_RED_EYE;
+      break;
+    default:
+      flash = CAM_LIGHT_FLASH_MODE_OFF;
+      break;
+  }
+
+  GST_DEBUG_OBJECT (v4l2camsrc, "Set flash mode: %d", flash);
+  err =
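gst_v4l2camsrc_read_exif() above splits the makernote's packed focal_length and f_number_curr fields into EXIF numerator/denominator pairs; the shift and mask suggest a 16.16-style packing. A standalone restatement of that unpacking, for illustration only (helper name invented):

/* Illustrative only: high 16 bits = numerator, low 16 bits = denominator,
 * e.g. 0x000A0001 unpacks to 10/1. */
static void
unpack_packed_rational_sketch (unsigned int packed,
    unsigned int *numerator, unsigned int *denominator)
{
  *numerator = packed >> 16;
  *denominator = packed & 0xFFFF;
}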
cam_feature_set (v4l2camsrc->video_fd, CAM_LIGHT_FLASH_MODE, + flash); + + if(err) + goto error; + + g_mutex_unlock (v4l2camsrc->device_mutex); + + return TRUE; + +/* ERRORS */ +error: + { + g_mutex_unlock (v4l2camsrc->device_mutex); + return FALSE; + } +} + +gboolean +gst_v4l2camsrc_write_settings (GstCameraSrc * camsrc, + GstPhotoSettings * photoconf, gboolean scene_override) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + GST_DEBUG ("Write settings to libmfldcamd"); + + g_mutex_lock (v4l2camsrc->device_mutex); + + if (!v4l2camsrc->initialized) { + g_mutex_unlock (v4l2camsrc->device_mutex); + GST_DEBUG ("Device not initialized"); + return FALSE; + } + + if (scene_override && + photoconf->scene_mode != GST_PHOTOGRAPHY_SCENE_MODE_MANUAL) { + /* If scene override flag is set, we just leave the new settings in use */ + GST_DEBUG ("SCENE OVERRIDE, ext=%d", photoconf->scene_mode); + cam_feature_set (v4l2camsrc->video_fd, CAM_GENERAL_SCENE_MODE, + gst_v4l2camsrc_scene_map[photoconf->scene_mode]); + } else { + GST_DEBUG ("Normal settings"); + cam_feature_set (v4l2camsrc->video_fd, CAM_AWB_MODE, + gst_v4l2camsrc_wb_map[photoconf->wb_mode]); + + cam_feature_set (v4l2camsrc->video_fd, CAM_GENERAL_EFFECT_TYPE, + gst_v4l2camsrc_effect_map[photoconf->tone_mode]); + + cam_feature_set (v4l2camsrc->video_fd, CAM_LIGHT_FLASH_MODE, + gst_v4l2camsrc_flash_map[photoconf->flash_mode]); + + /* These will set exposure mode to MANUAL, is value is != 0 */ + cam_feature_set (v4l2camsrc->video_fd, CAM_EXPOSURE_MANUAL_TIME, + photoconf->exposure); + cam_feature_set (v4l2camsrc->video_fd, CAM_EXPOSURE_MANUAL_APERTURE, + photoconf->aperture); + + cam_feature_set (v4l2camsrc->video_fd, CAM_EXPOSURE_COMPENSATION, + (gint) (photoconf->ev_compensation * 10)); + + cam_feature_set (v4l2camsrc->video_fd, CAM_EXPOSURE_ISO_VALUE, + gst_v4l2camsrc_libmfld_reverse_map_iso (photoconf->iso_speed)); + + cam_feature_set (v4l2camsrc->video_fd, CAM_FOCUS_MODE, + gst_v4l2camsrc_focus_map[photoconf->focus_mode]); + + cam_feature_set (v4l2camsrc->video_fd, CAM_GENERAL_FLICKER_REDUCTION_MODE, + gst_v4l2camsrc_flicker_map[photoconf->flicker_mode]); + + cam_feature_set (v4l2camsrc->video_fd, CAM_NOISE_REDUCTION_MODE, + photoconf->noise_reduction); + } + g_mutex_unlock (v4l2camsrc->device_mutex); + + return TRUE; +} + +gboolean +gst_v4l2camsrc_set_AeAafwindow (GstCameraSrc * camsrc, GstCameraSrc3a_window window) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = TRUE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + GST_DEBUG_OBJECT(v4l2camsrc,"ae-af-window-setting: x_left:%d, x_right:%d," + "y_bottom:%d, y_top:%d, weight:%d.\n", + window.x_left,window.x_right, + window.y_bottom, window.y_top, + window.weight); + + v4l2camsrc->af_window = v4l2camsrc->ae_window = *(GstCameraWindow*)&window; + + if (v4l2camsrc->is_active) { + g_mutex_lock (v4l2camsrc->device_mutex); + cam_set_af_ae_window((advci_window *) (&v4l2camsrc->af_window)); + g_mutex_unlock (v4l2camsrc->device_mutex); + } + + return ret; +} + +gboolean +gst_v4l2camsrc_set_autofocus (GstCameraSrc * camsrc, gboolean on) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = FALSE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + g_mutex_lock (v4l2camsrc->device_mutex); + + cam_err_t err = CAM_ERR_NONE; + + err= cam_set_autofocus(on); + v4l2camsrc->af_enabled = on; + + ret = (err == CAM_ERR_NONE); + + g_mutex_unlock (v4l2camsrc->device_mutex); + + GST_DEBUG ("setting autofocus: %s", ret ? 
"ok" : "failed"); + + return ret; +} + +/* + * + */ +gboolean +gst_v4l2camsrc_set_autoexposure (GstCameraSrc * camsrc, gboolean on) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = FALSE; + gchar *mode; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + mode = on ? "ON" : "OFF"; + + GST_DEBUG_OBJECT (v4l2camsrc, "setting autoexposure: %s", mode); + + + g_mutex_lock (v4l2camsrc->device_mutex); + + cam_err_t err = CAM_ERR_NONE; + + err = cam_set_autoexposure(v4l2camsrc->video_fd, on); + v4l2camsrc->ae_enabled = on; + + ret = (err == CAM_ERR_NONE); + g_mutex_unlock (v4l2camsrc->device_mutex); + + GST_DEBUG_OBJECT (v4l2camsrc, "Setting autoexposure %s: %s", mode, + ret ? "SUCCESS" : "FAILED"); + + return ret; +} + +/* + * + */ +GstPhotoCaps +gst_v4l2camsrc_get_capabilities (GstCameraSrc * camsrc) +{ + GstPhotoCaps pcaps; + + pcaps = GST_PHOTOGRAPHY_CAPS_ZOOM | GST_PHOTOGRAPHY_CAPS_EV_COMP | + GST_PHOTOGRAPHY_CAPS_WB_MODE | GST_PHOTOGRAPHY_CAPS_TONE | + GST_PHOTOGRAPHY_CAPS_SCENE | GST_PHOTOGRAPHY_CAPS_FLASH | + GST_PHOTOGRAPHY_CAPS_FOCUS | GST_PHOTOGRAPHY_CAPS_APERTURE | + GST_PHOTOGRAPHY_CAPS_EXPOSURE | GST_PHOTOGRAPHY_CAPS_SHAKE | + GST_PHOTOGRAPHY_CAPS_ISO_SPEED; + + return pcaps; +} + +/* + * + */ +gboolean +gst_v4l2camsrc_check_focus_status (GstMFLDV4l2CamSrc * v4l2camsrc, + GstCameraFocusStatus *fs, gboolean detailed) +{ + gboolean ret = FALSE; + cam_focus_status_t status; + gboolean update; + + GST_DEBUG_OBJECT (v4l2camsrc, "Retrieving focus status"); + + update = cam_checkfocus_status (&status, (v4l2camsrc->debug_flags & GST_CAMERASRC_DEBUG_FLAGS_AUTO_FOCUS)); + + if (update == TRUE) { + fs->status = cam_find_item_new (gst_v4l2camsrc_focus_status_map, status, TRUE); + ret = TRUE; + + GST_DEBUG_OBJECT (v4l2camsrc, "Focus status: %d", fs->status); + } + return ret; +} + +gboolean +gst_v4l2camsrc_set_capture_mode (GstCameraSrc * camsrc, + GstCameraSrcCaptureMode mode) +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gboolean ret = FALSE; + int err; //cam_err_t err; + cam_capture_mode_t cam_mode; + static const gchar *cmodes[] = { "VIEWFINDER", "STILL", "VIDEO" }; + struct v4l2_streamparm parm; + int binary; + gint fd; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + + fd = v4l2camsrc->video_fd; + + cam_mode = find_item (gst_v4l2camsrc_capture_map, mode); + + g_mutex_lock (v4l2camsrc->device_mutex); + + if (v4l2camsrc->initialized) { + v4l2camsrc->capture_mode = mode; + GST_DEBUG ("Setting isp capture mode: %s", cmodes[mode]); + memset (&parm, 0x00, sizeof (struct v4l2_streamparm)); + + parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + err = ioctl(fd, VIDIOC_G_PARM, &parm); + if (err < 0) { + GST_DEBUG("Unable to get frame rate: %s (%d).\n", + strerror(errno), errno); + goto done; + } + GST_DEBUG("Current frame rate: %u/%u\n", + parm.parm.capture.timeperframe.numerator, + parm.parm.capture.timeperframe.denominator); + + GST_DEBUG("Current run mode: %04x\n", parm.parm.capture.capturemode); + + switch (mode) { + case GST_CAMERA_SRC_CAPTURE_MODE_VIEWFINDER: + binary = CI_MODE_PREVIEW; + break;; + case GST_CAMERA_SRC_CAPTURE_MODE_STILL: + binary = CI_MODE_STILL_CAPTURE; + break; + case GST_CAMERA_SRC_CAPTURE_MODE_VIDEO: + binary = CI_MODE_VIDEO; + break; + default: + binary = CI_MODE_PREVIEW; + break; + } + parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; + parm.parm.capture.capturemode = binary; + + GST_DEBUG("New run mode: %04x\n", parm.parm.capture.capturemode); + + if (ioctl (fd, VIDIOC_S_PARM, &parm) < 0) { + GST_DEBUG("Switch Mode failed !\n"); + } + + ret = (err >= 0); + GST_DEBUG_OBJECT (v4l2camsrc, "Setting capture 
mode done: %s", + ret ? "OK" : "FAIL"); + } + else { + GST_DEBUG_OBJECT (v4l2camsrc, "Device not initialized"); + } + +done: + g_mutex_unlock (v4l2camsrc->device_mutex); + + return ret; +} + +gboolean gst_v4l2camsrc_set_strobe_state (GstCameraSrc * camsrc, + gboolean state) + +{ + GstMFLDV4l2CamSrc *v4l2camsrc; + gint fd; + cam_err_t err = CAM_ERR_NONE; + + v4l2camsrc = GST_V4L2CAMSRC (camsrc); + fd = v4l2camsrc->video_fd; + + err = cam_set_flash (fd, state); + return(err == CAM_ERR_NONE); +} +gboolean +gst_libmfldcam_capture_correction_update (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + int fd = v4l2camsrc->video_fd; + gboolean ret; + + if (!v4l2camsrc->cc_updated) + return TRUE; + + if (v4l2camsrc->gdc_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_GDC, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_GDC, FALSE); + + if (ret) + goto set_cc_failed; + + if (v4l2camsrc->cac_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_CAC, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_CAC, FALSE); + + if (ret) + goto set_cc_failed; + + if (v4l2camsrc->dvs_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_DVS, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_DVS, FALSE); + + if (ret) + goto set_cc_failed; + + if (v4l2camsrc->ee_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_EE, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_EE, FALSE); + + if (ret) + goto set_cc_failed; + + if (v4l2camsrc->sc_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_SC, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_SC, FALSE); + + if (ret) + goto set_cc_failed; + + if (v4l2camsrc->blc_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_BLC, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_BLC, FALSE); + + if (v4l2camsrc->bpd_enabled) + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_BPD, TRUE); + else + ret = cam_set_capture_correction (fd, CAM_CAPTURE_CORRECTION_BPD, FALSE); + + if (ret) + goto set_cc_failed; + + v4l2camsrc->cc_updated = FALSE; + + if (ret) + goto set_cc_failed; + + + return TRUE; + +set_cc_failed: + GST_WARNING_OBJECT (v4l2camsrc, "Set capture correction failed \n"); + return FALSE; + +} + +/** + * gst_v4l2camsrc_libmfldcam_set_zoom: + * @v4l2camsrc: #GstMFLDV4l2CamSrc object. + * @zoom: Desired zoom factor. + * + * Set the zoom factor for captured image. + * + * Returns: TRUE on success. + */ +gboolean +gst_v4l2camsrc_libmfldcam_set_zoom (GstMFLDV4l2CamSrc * v4l2camsrc, gfloat zoom) +{ + cam_err_t err = CAM_ERR_NONE; + gboolean ret; + + GST_DEBUG_OBJECT (v4l2camsrc, "ZOOM: %f", zoom); + + err = cam_set_zoom (v4l2camsrc->video_fd, zoom); + /* ret = err == CAM_ERR_NONE; */ + ret = TRUE; + + GST_DEBUG_OBJECT (v4l2camsrc, "Setting zoom: %s", ret ? 
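gst_v4l2camsrc_set_capture_mode() above switches the ISP run mode through the capturemode field of struct v4l2_streamparm, reading the current parameters first so the frame rate is preserved. Below is a minimal standalone sketch of that read-modify-write, for illustration only; the CI_MODE_* values are atomisp-specific, so the mode is passed in rather than named.

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Illustrative only: returns 0 on success, -1 on the first failing ioctl. */
static int
set_capturemode_sketch (int fd, unsigned int capturemode)
{
  struct v4l2_streamparm parm;

  memset (&parm, 0, sizeof (parm));
  parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  if (ioctl (fd, VIDIOC_G_PARM, &parm) < 0)     /* keep current frame rate */
    return -1;

  parm.parm.capture.capturemode = capturemode;  /* e.g. CI_MODE_PREVIEW */
  return ioctl (fd, VIDIOC_S_PARM, &parm) < 0 ? -1 : 0;
}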
"SUCCESS" : "FAIL"); + + return ret; +} + +gboolean +gst_v4l2camsrc_libmfldcam_get_makernote (GstMFLDV4l2CamSrc * v4l2camsrc, unsigned char *buf, unsigned size) +{ + cam_err_t err = CAM_ERR_NONE; + + GST_DEBUG_OBJECT (v4l2camsrc, "%s, !!!!!!line:%d\n", __func__, __LINE__); + + err = cam_get_makernote (v4l2camsrc->video_fd, buf, size); + return err; +} + +gboolean +gst_v4l2camsrc_libmfldcam_get_focus_posi(GstMFLDV4l2CamSrc * v4l2camsrc, unsigned *posi) +{ + cam_err_t err = CAM_ERR_NONE; + err = cam_get_focus_posi(v4l2camsrc->video_fd, posi); + if(err != CAM_ERR_NONE) + return FALSE; + + return TRUE; +} + +/** + * @v4l2camsrc: #GstMFLDV4l2CamSrc object + * + * configure driver default settings and set the opened ISP fd to libmfldcam + */ +gboolean +gst_v4l2camsrc_libmfldcam_init (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + cam_err_t err; + if (v4l2camsrc->input_sensor == GST_CAMERA_INPUT_SENSOR_PRIMARY) + err = cam_driver_init (v4l2camsrc->video_fd, "mt9e013"); + else + err = cam_driver_init (v4l2camsrc->video_fd, "mt9m114"); + + if (err != CAM_ERR_NONE) { + GST_WARNING ("libmfldcam initialization failed"); + return FALSE; + } + if (v4l2camsrc->cc_updated) + gst_libmfldcam_capture_correction_update (v4l2camsrc); + + if (v4l2camsrc->gamma_updated) { + cam_set_tone_control (v4l2camsrc->video_fd, CAM_GAMMA_VALUE, + &v4l2camsrc->tone); + cam_set_tone_control (v4l2camsrc->video_fd, CAM_BRIGHTNESS_VALUE, + &v4l2camsrc->tone); + cam_set_tone_control (v4l2camsrc->video_fd, CAM_CONTRAST_VALUE, + &v4l2camsrc->tone); + v4l2camsrc->gamma_updated = FALSE; + } + + /* Set the default settings here */ + //FIXME + GST_DEBUG_OBJECT (v4l2camsrc, "Configure default settings %s", + v4l2camsrc->videodev); + + gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_VFLIP,v4l2camsrc->vflip); + gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_HFLIP,v4l2camsrc->hflip); + + v4l2camsrc->initialized = TRUE; + v4l2camsrc->is_open = TRUE; + return TRUE; +} + +gboolean +gst_v4l2camsrc_libmfldcam_deinit (GstMFLDV4l2CamSrc * v4l2camsrc) +{ + //FIXME : determin whether is it initialized + cam_err_t err; + err = cam_driver_deinit (v4l2camsrc->video_fd); + if (err != CAM_ERR_NONE) { + GST_WARNING ("libmfldcam initialization failed"); + return FALSE; + } + //FIXME + v4l2camsrc->is_open = FALSE; + return TRUE; +} diff --git a/gst/mfldv4l2cam/v4l2camsrc_calls.h b/gst/mfldv4l2cam/v4l2camsrc_calls.h new file mode 100644 index 0000000..e20a6ed --- /dev/null +++ b/gst/mfldv4l2cam/v4l2camsrc_calls.h @@ -0,0 +1,141 @@ +/* GStreamer + * + * Copyright (C) 2002 Ronald Bultje + * 2006 Edgard Lima + * 2008-2010 Nokia Corporation + * 2010 Intel Corporation + * + * v4l2camsrc.h - system calls + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef __V4L2CAMSRC_CALLS_H__ +#define __V4L2CAMSRC_CALLS_H__ + +#include "gstv4l2camsrc.h" + +/* simple check whether the device is open */ +#define GST_V4L2CAMSRC_IS_OPEN(v4l2camsrc) \ + (v4l2camsrc->video_fd > 0) + +/* check whether the device is 'active' */ +#define GST_V4L2CAMSRC_IS_ACTIVE(v4l2camsrc) \ + (v4l2camsrc->buffer != NULL) + +#define GST_V4L2CAMSRC_IS_OVERLAY(v4l2camsrc) \ + (v4l2camsrc->vcap.capabilities & V4L2CAMSRC_CAP_VIDEO_OVERLAY) + +/* checks whether the current v4lv4l2object has already been open()'ed or not */ +#define GST_V4L2CAMSRC_CHECK_OPEN(v4l2camsrc) \ + if (!GST_V4L2CAMSRC_IS_OPEN(v4l2camsrc)) \ +{ \ + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, \ + ("Device is not open."), (NULL)); \ + return FALSE; \ +} + +/* checks whether the current v4lv4l2object is close()'ed or whether it is still open */ +#define GST_V4L2CAMSRC_CHECK_NOT_OPEN(v4l2camsrc) \ + if (GST_V4L2CAMSRC_IS_OPEN(v4l2camsrc)) \ +{ \ + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, \ + ("Device is open."), (NULL)); \ + return FALSE; \ +} + +/* checks whether we're in capture mode or not */ +#define GST_V4L2CAMSRC_CHECK_ACTIVE(v4l2camsrc) \ + if (!GST_V4L2CAMSRC_IS_ACTIVE(v4l2camsrc)) \ +{ \ + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, \ + (NULL), ("Device is not in streaming mode")); \ + return FALSE; \ +} + +/* checks whether we're out of capture mode or not */ +#define GST_V4L2CAMSRC_CHECK_NOT_ACTIVE(v4l2camsrc) \ + if (GST_V4L2CAMSRC_IS_ACTIVE(v4l2camsrc)) \ +{ \ + GST_ELEMENT_ERROR (v4l2camsrc, RESOURCE, SETTINGS, \ + (NULL), ("Device is in streaming mode")); \ + return FALSE; \ +} + + +#define GST_V4L2CAMSRC_MAX_BUFFERS 16 +#define GST_V4L2CAMSRC_MIN_BUFFERS 1 +#define GST_V4L2CAMSRC_DEFAULT_BUFFERS 4 + +#define GST_V4L2CAMSRC_START_INPUT 0 +#define GST_V4L2CAMSRC_MAX_INPUT 1 + +/** + * V4L2CameraInputSensor: + * @V2L2_CAMERA_INPUT_SENSOR_SECONDARY: Secondary Sensor as ISP input + * @V2L2_CAMERA_INPUT_SENSOR_PRIMARY: Primary Sensor as ISP input + * + * Input sensor + */ +typedef enum { + V2L2_CAMERA_INPUT_SENSOR_SECONDARY, + V2L2_CAMERA_INPUT_SENSOR_PRIMARY +} V4L2CameraInputSensor; + +/* open/close the device */ +gboolean gst_v4l2camsrc_open (GstCameraSrc *camsrc); + +gboolean gst_v4l2camsrc_close (GstCameraSrc *camsrc); + +/* attribute control */ +gboolean gst_v4l2camsrc_get_attribute (GstCameraSrc *camsrc, + int attribute, + int *value); + +gboolean gst_v4l2camsrc_set_attribute (GstCameraSrc *camsrc, + int attribute, + const int value); + +gboolean gst_v4l2camsrc_set_capture (GstCameraSrc *camsrc, + GstOperationMode mode, + gboolean try_only, + guint32 *pixelformat, + guint *width, guint *height, + guint *fps_n, guint *fps_d); + +gboolean gst_v4l2camsrc_capture_start (GstCameraSrc *camsrc, + GstCaps *caps); + +GstFlowReturn gst_v4l2camsrc_grab_frame (GstCameraSrc *camsrc, + GstBuffer **buf, + GstCameraCapturePhase phase); + +gboolean gst_v4l2camsrc_capture_stop (GstCameraSrc *camsrc); + +/* Used internally */ +gboolean gst_v4l2camsrc_fill_format_list (GstMFLDV4l2CamSrc *v4l2camsrc); +gboolean gst_v4l2camsrc_clear_format_list (GstMFLDV4l2CamSrc *v4l2camsrc); +GstCaps * gst_v4l2camsrc_probe_caps_for_format (GstMFLDV4l2CamSrc *v4l2camsrc, + guint32 pixelformat, + const GstStructure *template); +gboolean +gst_v4l2camsrc_libmfldcam_set_zoom (GstMFLDV4l2CamSrc *v4l2camsrc, gfloat zoom); +gboolean +gst_v4l2camsrc_libmfldcam_get_makernote (GstMFLDV4l2CamSrc * v4l2camsrc, unsigned char *buf, unsigned size); +gboolean 
+gst_v4l2camsrc_libmfldcam_get_focus_posi(GstMFLDV4l2CamSrc * v4l2camsrc, unsigned *posi); + +#endif /* __V4L2CAMSRC_CALLS_H__ */ diff --git a/packaging/gst-plugins-atomisp.changes b/packaging/gst-plugins-atomisp.changes new file mode 100644 index 0000000..e1e9129 --- /dev/null +++ b/packaging/gst-plugins-atomisp.changes @@ -0,0 +1,364 @@ +* Fri Feb 01 2013 Jussi Saavalainen accepted/tizen_2.0/20130128.180320@3e02944 +- Remove low-res frame ISP padding with software crop. TZSP-4269 + +* Wed Jan 30 2013 Telle-Tiia Pitkänen submit/tizen_2.0/20130128.132122@0e78adb +- Rename package to gst-plugins-atomisp + +* Sun Jan 27 2013 Marko Ollonen accepted/trunk/20130117.213933@b58e03f +- af window configuration update, TZSP-1668. + +* Thu Jan 17 2013 Jussi Saavalainen submit/trunk/20130117.150127@471e12c +- Add ISP ae_mode setting as Gst property. TZSP-4682 +- Add exposure mode initialisation. + +* Thu Jan 17 2013 Marko Ollonen accepted/trunk/20130114.185021@5615cfc +- colorspace: fourcc typo, TZSP-4757. + +* Wed Jan 16 2013 Marko Ollonen accepted/trunk/20130114.185021@4590562 +- continuos focus mode support added, TZSP-1133. +- fix bugs came from 3a lib dynamic linking change. + +* Mon Jan 14 2013 Marko Ollonen accepted/tizen_2.0/20130110.192337@36cdfa5 +- buffer-sharing: supports
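Returning to the declarations in v4l2camsrc_calls.h above, the sketch below illustrates the intended call order for that public API. It is illustrative only: `src`, `caps` and `phase` are assumed to be supplied by the element code in gstv4l2camsrc.c, and error handling beyond early returns is omitted.

/* Illustrative only: open the device, start capture, grab one frame, stop. */
static GstFlowReturn
capture_one_frame_sketch (GstCameraSrc * src, GstCaps * caps,
    GstCameraCapturePhase phase)
{
  GstBuffer *buf = NULL;
  GstFlowReturn ret;

  if (!gst_v4l2camsrc_open (src))
    return GST_FLOW_ERROR;

  if (!gst_v4l2camsrc_capture_start (src, caps)) {
    gst_v4l2camsrc_close (src);
    return GST_FLOW_ERROR;
  }

  ret = gst_v4l2camsrc_grab_frame (src, &buf, phase);
  /* in the element, buf would be pushed downstream here */

  gst_v4l2camsrc_capture_stop (src);
  gst_v4l2camsrc_close (src);

  return ret;
}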