cam_capture_mode_t capture_mode;
int rer_enabled;
int dis_enabled, dvs_enabled;
- int af_enabled, ae_enabled, awb_enabled;
+ int ae_awb_af_enabled;
int af_result; // 0 means fail, it will be filled when captured finished.
int still_af_count, start_still_af;
advci_window window;
mfld_cam_settings.flicker_mode = CAM_GENERAL_FLICKER_REDUCTION_MODE_50HZ;
mfld_cam_settings.focus_mode = CAM_FOCUS_MODE_AUTO;
- mfld_driver.ae_enabled = 0;
- mfld_driver.af_enabled = 0;
- mfld_driver.awb_enabled = 0;
+ mfld_driver.ae_awb_af_enabled = 0;
mfld_driver.dis_enabled = 0;
mfld_driver.dvs_enabled = 0;
mfld_driver.rer_enabled = 0;
mfld_driver.start_still_af = 0;
mfld_driver.still_af_count = 0;
- mfld_driver.first_frame = 1;
+ mfld_driver.first_frame = 0;
mfld_driver.mmap = 1;
mfld_driver.g_3a_started = 0;
mfldadvci->AfSetMode(advci_af_mode_auto);
mfldadvci->AfSetMeteringMode (advci_af_metering_mode_auto);
}
- else
+ else{
mfld_driver.sensor_type = SENSOR_TYPE_SOC;
-
+ mfld_driver.first_frame = 1;
+ }
cam_driver_init_gamma (fd);
/* FixMe Need to get the ISO speed , apture when use need to read these
cam_driver_dbg ("mmap raw image failed");
return;
}
- cam_driver_dbg ("MMAP raw address from kernel 0x%x\n", raw_buffer.start);
+ cam_driver_dbg ("MMAP raw address from kernel %p\n", raw_buffer.start);
buf = (char *) raw_buffer.start;
memset (fn_buf, 0, sizeof (char) * 100);
for (;;) {
sem_wait(&g_sem_3a);
/* Read 3A statistics */
- if ((mfld_driver.ae_enabled || mfld_driver.af_enabled
- || mfld_driver.awb_enabled || mfld_driver.dis_enabled
+ if ((mfld_driver.ae_awb_af_enabled || mfld_driver.dis_enabled
|| mfld_driver.dvs_enabled) && (mfld_driver.sensor_type == SENSOR_TYPE_RAW) )
{
/* AE, AWB and AF Process */
{
gboolean need_assist = FALSE;
mfld_driver.mmap = capture_settings->mmap;
- mfld_driver.ae_enabled = capture_settings->ae;
- mfld_driver.af_enabled = capture_settings->af;
- mfld_driver.awb_enabled = capture_settings->awb;
+ mfld_driver.ae_awb_af_enabled = capture_settings->ae_awb_af;
/* Discard the first two frames */
if (mfld_driver.first_frame) {
}
/* Still AF start */
if ((mfld_driver.start_still_af) && (mfld_driver.sensor_type == SENSOR_TYPE_RAW)) {
- if (mfld_driver.af_enabled && mfld_driver.still_af_count > 0)
+ if (mfld_driver.ae_awb_af_enabled && mfld_driver.still_af_count > 0)
mfldadvci->af_stop ();
mfld_driver.focus_done = 0;
mfld_driver.start_still_af = 0;
mfld_driver.enable_torch = TRUE;
cam_driver_set_torch(fd, TRUE);
}
- if (mfld_driver.af_enabled) {
+ if (mfld_driver.ae_awb_af_enabled) {
mfldadvci->af_start ();
if(mfld_cam_settings.focus_mode == CAM_FOCUS_MODE_TOUCH_AUTO) {
mfldadvci->AfSetWindows (1, &mfld_driver.window );
if (mfld_driver.still_af_count) {
gboolean complete = TRUE; /* AF status */
- if (mfld_driver.af_enabled)
+ if (mfld_driver.ae_awb_af_enabled)
complete = cam_af_is_complete();
if (complete ||
struct cam_capture_settings
{
int mmap;
- int ae, af, awb;
+ int ae_awb_af;
int dump_raw;
unsigned int raw_output_size;
int dump_image;
GST_IMPLEMENT_CAMERA_SRC_PHOTO_METHODS (GstCameraSrc, gst_camsrc);
GST_IMPLEMENT_CAMERA_SRC_CAMERA_CONTROL_METHODS (GstCameraSrc, gst_camsrc);
-
-/* Enumerations */
-enum {
- /*signal*/
- SIGNAL_STILL_CAPTURE,
- SIGNAL_NEGO_COMPLETE,
- LAST_SIGNAL
-};
-
-static guint gst_camerasrc_signals[LAST_SIGNAL] = { 0 };
+guint gst_camerasrc_signals[CAMERA_IN_LAST_SIGNAL] = { 0 };
static gboolean
gst_camerasrc_iface_supported (GstImplementsInterface * iface, GType iface_type)
data2);
}
+/* GClosure marshaller for callbacks of shape
+ * void (*cb) (gpointer instance, GstBuffer *usrptr, gpointer user_data).
+ * Registered for the "still-capture-usrptr-buffer" signal so the
+ * application can fill in a user-pointer buffer for still capture. */
+void gst_camerasrc_VOID__OBJECT_USRPTR_BUFFER(GClosure *closure,
+                    GValue *return_value,
+                    guint n_param_values,
+                    const GValue *param_values,
+                    gpointer invocation_hint,
+                    gpointer marshal_data)
+{
+  typedef void (*GMarshalFunc_VOID__OBJECT_USRPTR_BUFFER)(gpointer data1,
+      gpointer arg_1, gpointer data2);
+  register GMarshalFunc_VOID__OBJECT_USRPTR_BUFFER callback;
+  register GCClosure *cc = (GCClosure*) closure;
+  register gpointer data1, data2;
+
+  /* Exactly two GValues: the emitting instance plus one buffer argument. */
+  g_return_if_fail (n_param_values == 2);
+
+  /* For g_signal_connect_swapped() closures the user data becomes the
+   * first callback argument and the instance the last, and vice versa. */
+  if (G_CCLOSURE_SWAP_DATA(closure)) {
+    data1 = closure->data;
+    data2 = g_value_peek_pointer(param_values + 0);
+  } else {
+    data1 = g_value_peek_pointer(param_values + 0);
+    data2 = closure->data;
+  }
+
+  /* marshal_data overrides the closure callback when class closures are
+   * overridden; otherwise invoke the connected handler directly. */
+  callback = (GMarshalFunc_VOID__OBJECT_USRPTR_BUFFER)(marshal_data ? marshal_data : cc->callback);
+
+  callback(data1, g_marshal_value_peek_object(param_values + 1), data2);
+}
+
int gst_camerasrc_send_af_status(GstCameraSrc *camsrc , int state)
{
GstMessage *m = NULL;
* @start: when re-negotiation is finished.
*
*/
- gst_camerasrc_signals[SIGNAL_NEGO_COMPLETE] =
+ gst_camerasrc_signals[CAMERA_IN_SIGNAL_NEGO_COMPLETE] =
g_signal_new("nego-complete",
G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST,
gst_marshal_VOID__VOID,
G_TYPE_NONE, 0);
- /**
- * GstCameraSrc::still-capture:
- * @camerasrc: the camerasrc instance
- * @buffer: the buffer that will be pushed - Main
- * @buffer: the buffer that will be pushed - Thumbnail
- * @buffer: the buffer that will be pushed - Screennail
- *
- * This signal gets emitted before sending the buffer.
- */
- gst_camerasrc_signals[SIGNAL_STILL_CAPTURE] =
- g_signal_new("still-capture",
- G_TYPE_FROM_CLASS(klass),
- G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstCameraSrcClass, still_capture),
- NULL,
- NULL,
- gst_camerasrc_VOID__OBJECT_OBJECT,
- G_TYPE_NONE,
- 3, /* Number of parameter */
- GST_TYPE_BUFFER, /* Main image buffer */
- GST_TYPE_BUFFER, /* Thumbnail image buffer */
- GST_TYPE_BUFFER); /* Screennail image buffer */
+ /**
+ * GstCameraSrc::still-capture:
+ * @camerasrc: the camerasrc instance
+ * @buffer: the buffer that will be pushed - Main
+ * @buffer: the buffer that will be pushed - Thumbnail
+ * @buffer: the buffer that will be pushed - Screennail
+ *
+ * This signal gets emitted before sending the buffer.
+ */
+ gst_camerasrc_signals[CAMERA_IN_SIGNAL_STILL_CAPTURE] =
+ g_signal_new("still-capture",
+ G_TYPE_FROM_CLASS(klass),
+ G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstCameraSrcClass, still_capture),
+ NULL,
+ NULL,
+ gst_camerasrc_VOID__OBJECT_OBJECT,
+ G_TYPE_NONE,
+ 3, /* Number of parameter */
+ GST_TYPE_BUFFER, /* Main image buffer */
+ GST_TYPE_BUFFER, /* Thumbnail image buffer */
+ GST_TYPE_BUFFER); /* Screennail image buffer */
+ /**
+ * GstCameraSrc::still-capture-usrptr-buffer:
+ * @camerasrc: the camerasrc instance
+ * @buffer: the usrptr buffer that will be pulled
+ *
+ * This signal gets emitted before creating the buffer.
+ */
+ gst_camerasrc_signals[CAMERA_IN_SIGNAL_STILL_USRPTR_BUFFER] =
+ g_signal_new("still-capture-usrptr-buffer",
+ G_TYPE_FROM_CLASS(klass),
+ G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstCameraSrcClass, still_capture_usrptr_buffer),
+ NULL,
+ NULL,
+ gst_camerasrc_VOID__OBJECT_USRPTR_BUFFER,
+ G_TYPE_NONE,
+ 1, /* Number of parameter */
+ GST_TYPE_BUFFER); /* user ptr image buffer */
gst_camerasrc_override_photo_properties (gobject_class);
if (camerasrc->capture_mode == GST_CAMERA_SRC_CAPTURE_MODE_VIDEO &&
bclass->is_active (camerasrc)) {
- /* Stop autofocus in video mode */
- bclass->set_autofocus (camerasrc, FALSE);
- /* AutoExposure must be run always in video mode */
- bclass->set_autoexposure (camerasrc, TRUE);
}
if (bclass->set_capture_mode) {
bclass->set_capture_mode (camerasrc, camerasrc->capture_mode);
GST_DEBUG_OBJECT(camerasrc, "negotiation start");
basesrc_class->negotiate(GST_BASE_SRC(camerasrc));
camerasrc->req_negotiation = FALSE;
- g_signal_emit(G_OBJECT(camerasrc), gst_camerasrc_signals[SIGNAL_NEGO_COMPLETE], (GQuark)NULL);
+ g_signal_emit(G_OBJECT(camerasrc), gst_camerasrc_signals[CAMERA_IN_SIGNAL_NEGO_COMPLETE], (GQuark)NULL);
GST_DEBUG_OBJECT (camerasrc, "negotiation stop");
}
if (camerasrc->requested_af_mode == AF_ON_REQUESTED) {
gboolean ret;
- /* In still capture mode AE will be locked during AF operation */
- if (camerasrc->viewfinder_mode == GST_CAMERA_SRC_VIEWFINDER_MODE_STILL) {
- bclass->set_autoexposure (camerasrc, FALSE);
- }
ret = bclass->set_autofocus (camerasrc, TRUE);
-
if (ret) {
camerasrc->photo_capture_phase = GST_CAMERA_AUTOFOCUS;
- } else {
- /* Starting AF failed, so start AE again */
- bclass->set_autoexposure (camerasrc, TRUE);
}
} else {
- bclass->set_autofocus (camerasrc, FALSE);
- bclass->set_autoexposure (camerasrc, TRUE);
camerasrc->photo_capture_phase = GST_CAMERA_VIEWFINDER;
}
GST_LOG_OBJECT (camerasrc, "CALL: capture callback");
g_mutex_unlock (camerasrc->state_lock);
g_signal_emit( G_OBJECT (camerasrc),
- gst_camerasrc_signals[SIGNAL_STILL_CAPTURE],
+ gst_camerasrc_signals[CAMERA_IN_SIGNAL_STILL_CAPTURE],
0,
buf_cap_signal1,
NULL,
GST_WARNING ("This element has no bus, therefore no message sent!");
}
- /* In still capture mode we don't turn off AF algorithm yet, since it */
- /* would enable CAF. Instead, it is turned off when application */
- /* explicitly calls set_autofocus (FALSE), which in turn raises */
- /* af_requested = OFF flag and AF is finally stopped. */
-
- /* In video capture mode AF will be stopped immediately to enable AE */
- if (camsrc->viewfinder_mode == GST_CAMERA_SRC_VIEWFINDER_MODE_VIDEO) {
- bclass->set_autofocus (camsrc, FALSE);
- }
-
- /* We don't turn on autoexposure here either. This way AE stays */
- /* "locked" until application explicitly calls set_autofocus (FALSE). */
-
camsrc->photo_capture_phase = GST_CAMERA_AUTOFOCUS_DONE;
}
{
camera_class->get_capabilities = gst_camerasrc_default_capabilities;
camera_class->set_autofocus = gst_camerasrc_default_set_onoff;
- camera_class->set_autoexposure = gst_camerasrc_default_set_onoff;
camera_class->read_settings = gst_camerasrc_default_ret_true_with_settings;
camera_class->write_settings = gst_camerasrc_default_write_settings;
camera_class->get_supported_caps = gst_camerasrc_default_get_caps;
GST_CAMERA_CAPTURE_DONE
} GstCameraCapturePhase;
+/* Signal Enumerations */
+enum {
+ /*signal*/
+ CAMERA_IN_SIGNAL_STILL_CAPTURE,
+ CAMERA_IN_SIGNAL_NEGO_COMPLETE,
+ CAMERA_IN_SIGNAL_STILL_USRPTR_BUFFER,
+ CAMERA_IN_LAST_SIGNAL
+};
typedef struct _GstCameraSrc GstCameraSrc;
typedef struct _GstCameraSrcClass GstCameraSrcClass;
* @set_vfinder_mode: Set viewfinder mode.
* @set_capture_mode: Set capturing mode.
* @set_autofocus: Turn on / off autofocus algorithm.
- * @set_autoexposure: Turn on / off auto exposure algorithm.
* @write_settings: Write all GstPhotoSettings to subclass at once.
* @read_settings: Read all device settings to given GstPhotoSettings structure.
* @set_zoom: Set the zoom factor.
gboolean
(*set_autofocus) (GstCameraSrc *camsrc, gboolean on_off);
-
- gboolean
- (*set_autoexposure) (GstCameraSrc *camsrc, gboolean on_off);
-
gboolean
(*write_settings) (GstCameraSrc *camsrc,
GstPhotoSettings *photoconf,
/* signals */
void (*nego_complete) (GstElement *element);
void (*still_capture) (GstElement *element, GstBuffer *main, GstBuffer *sub, GstBuffer *scrnl);
+ void (*still_capture_usrptr_buffer) (GstElement *element, GstBuffer *usrptr);
/*< private >*/
PROP_INPUT_SENSOR,
PROP_USE_MMAP,
PROP_USE_COPY,
- PROP_AE,
+ PROP_3A,
PROP_AE_METERING_MODE,
PROP_AE_WINDOW,
PROP_AE_MODE,
- PROP_AF,
PROP_AF_METERING_MODE,
PROP_AF_WINDOW,
- PROP_AWB,
PROP_STILL_AF,
PROP_FOCUS_POSITION,
PROP_VFLIP,
PROP_DUMP_IMAGE,
PROP_DEBUG_FLAGS,
PROP_DISABLE_LOW_RES_CROP,
+ PROP_DRAW_CORNER_MARKERS,
} GstV4L2CamSrcProperties;
-
+static GstStaticPadTemplate gst_v4l2camsrc_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "format = (fourcc) NV12, "
+ "framerate = (fraction) [ 0, 200 ], "
+ "width = (int) [ 1, MAX ], "
+ "height = (int) [ 1, MAX ];"
+ "video/x-raw-yuv, "
+ "format = (fourcc) I420, "
+ "framerate = (fraction) [ 0, 200 ], "
+ "width = (int) [ 1, MAX ], "
+ "height = (int) [ 1, MAX ];"
+ "video/x-raw-bayer, "
+ "format = (fourcc) BA10, "
+ "framerate = (fraction) [ 0, 200 ], "
+ "width = (int) [ 1, MAX ], "
+ "height = (int) [ 1, MAX ];"
+ "video/x-vaapi-sharing, "
+ "type = vaapi, "
+ "width = (int) [ 1, MAX ], "
+ "height = (int) [ 1, MAX ], "
+ "framerate = (fraction) [ 0, MAX ]"));
#define DEFAULT_PROP_DEVICE_NAME NULL
#define DEFAULT_PROP_DEVICE "/dev/video0"
return GST_V4L2CAMSRC_IS_ACTIVE (v4l2camsrc);
}
+/* Three-way comparator for cached caps entries, used with
+ * g_slist_find_custom() to look up previously probed caps by
+ * (input sensor, capture mode, pixelformat). Only the zero /
+ * non-zero distinction matters for the lookup, but the ordering
+ * contract is kept correct regardless. */
+static gint caps_cache_cmp(struct cached_gst_caps *a, struct cached_gst_caps *b)
+{
+  int r;
+
+  r = a->input - b->input;
+  if(r)
+    return r;
+  r = a->mode - b->mode;
+  if(r)
+    return r;
+  /* pixelformat is a guint32 fourcc: computing the difference via long
+   * and truncating to gint can flip the sign for large fourcc values
+   * (and misbehaves where long is 32 bits), so compare explicitly. */
+  return (a->pixelformat > b->pixelformat) - (a->pixelformat < b->pixelformat);
+}
+
/*
* gst_v4l2camsrc_v4l2fourcc_to_structure:
*
for (walk = v4l2camsrc->formats; walk; walk = walk->next) {
struct v4l2_fmtdesc *format;
+ GSList *it;
/* FIXME: Introduce own format structure */
format = (struct v4l2_fmtdesc *) walk->data;
+ struct cached_gst_caps *val, key = {
+ .input = v4l2camsrc->input_sensor,
+ .mode = v4l2camsrc->capture_mode,
+ .pixelformat = format->pixelformat
+ };
+
+ it = g_slist_find_custom(v4l2camsrc->caps_cache, &key, (GCompareFunc)caps_cache_cmp);
+ if(it) {
+ val = (struct cached_gst_caps *) it->data;
+ gst_caps_append(ret, gst_caps_ref(val->caps));
+ GST_DEBUG_OBJECT(v4l2camsrc, "Using cached caps for (sensor=%d,mode=%d,format=%0.4s)",
+ val->input,val->mode,(char*)&val->pixelformat);
+ continue;
+ }
+
+ GST_DEBUG_OBJECT(v4l2camsrc, "Now probing for (sensor=%d,mode=%d,format=%0.4s)",
+ key.input,key.mode,(char*)&key.pixelformat);
template = gst_v4l2camsrc_v4l2fourcc_to_structure (format->pixelformat);
if (template) {
tmp = gst_v4l2camsrc_probe_caps_for_format (v4l2camsrc,
format->pixelformat, template);
- if (tmp)
+ if (tmp) {
+ val = malloc(sizeof *val);
+ memcpy(val, &key, sizeof *val);
+ val->caps = gst_caps_ref(tmp);
+ v4l2camsrc->caps_cache = g_slist_prepend(v4l2camsrc->caps_cache, val);
gst_caps_append (ret, tmp);
+ }
gst_structure_free (template);
} else {
v4l2camsrc->probed_caps = gst_caps_ref (ret);
- GST_INFO_OBJECT(v4l2camsrc, "use GST_DEBUG >= 5 for probed caps");
- GST_LOG_OBJECT (v4l2camsrc, "probed caps: %" GST_PTR_FORMAT, ret);
+ GST_INFO_OBJECT (v4l2camsrc, "caps: %" GST_PTR_FORMAT, ret);
return ret;
}
return v4l2camsrc->num_buffers;
}
-/*
- * common format / caps utilities:
- */
-typedef struct
-{
- guint32 format;
- gboolean dimensions;
-} GstV4L2FormatDesc;
-
-static const GstV4L2FormatDesc gst_v4l2_formats[] = {
- /* from Linux 2.6.15 videodev2.h */
- {V4L2_PIX_FMT_YUV420, TRUE},
- {V4L2_PIX_FMT_YVU420, TRUE},
- {V4L2_PIX_FMT_YUV422P, TRUE},
- {V4L2_PIX_FMT_YUV444, TRUE},
-
- {V4L2_PIX_FMT_NV12, TRUE},
- {V4L2_PIX_FMT_NV21, TRUE},
-
- {V4L2_PIX_FMT_NV16, TRUE},
- {V4L2_PIX_FMT_NV61, TRUE},
-
- {V4L2_PIX_FMT_YUYV, TRUE},
- {V4L2_PIX_FMT_UYVY, TRUE},
-
- {V4L2_PIX_FMT_SBGGR16, TRUE},
-
- {V4L2_PIX_FMT_SBGGR8, TRUE},
- {V4L2_PIX_FMT_SGBRG8, TRUE},
- {V4L2_PIX_FMT_SGRBG8, TRUE},
- {V4L2_PIX_FMT_SRGGB8, TRUE},
-
- {V4L2_PIX_FMT_SBGGR10, TRUE},
- {V4L2_PIX_FMT_SGBRG10, TRUE},
- {V4L2_PIX_FMT_SGRBG10, TRUE},
- {V4L2_PIX_FMT_SRGGB10, TRUE},
-
- {V4L2_PIX_FMT_RGB24, TRUE},
- {V4L2_PIX_FMT_RGB32, TRUE},
- {V4L2_PIX_FMT_RGB565, TRUE},
-};
-
-#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
-#define GST_V4L2_MAX_SIZE (1<<15) /* 2^15 == 32768 */
-
-GstCaps *
-gst_v4l2camsrc_get_all_caps (void)
-{
- static GstCaps *caps = NULL;
-
- if (caps == NULL) {
- GstStructure *structure;
-
- guint i;
-
- caps = gst_caps_new_empty ();
- for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
- structure =
- gst_v4l2camsrc_v4l2fourcc_to_structure (gst_v4l2_formats[i].format);
- if (structure) {
- if (gst_v4l2_formats[i].dimensions) {
- gst_structure_set (structure,
- "width", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
- "height", GST_TYPE_INT_RANGE, 1, GST_V4L2_MAX_SIZE,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, 100, 1, NULL);
- }
- gst_caps_append_structure (caps, structure);
- }
- }
- structure = gst_structure_from_string(surface_string, NULL);
- gst_caps_append_structure (caps, structure);
- }
-
- return gst_caps_ref (caps);
-}
-
/*
* gst_v4l2camsrc_base_init:
"Maemo Multimedia <multimedia@maemo.org>");
gst_element_class_add_pad_template (element_class,
- gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_v4l2camsrc_get_all_caps ()));
+ gst_static_pad_template_get (&gst_v4l2camsrc_template));
}
gboolean gst_v4l2camsrc_set_autofocus (GstCameraSrc * camsrc, gboolean on);
-gboolean gst_v4l2camsrc_set_autoexposure (GstCameraSrc * camsrc, gboolean on);
GstPhotoCaps gst_v4l2camsrc_get_capabilities (GstCameraSrc * camsrc);
gboolean gst_v4l2camsrc_set_capture_mode (GstCameraSrc * camsrc,
GstCameraSrcCaptureMode mode);
"Whether copy the buffer from driver, debug only", FALSE, G_PARAM_READWRITE));
/* AE, AF, and AWB settings */
- g_object_class_install_property (gobject_class, PROP_AE,
- g_param_spec_boolean ("ae", "Auto Exposure",
- "Auto Exposure is On or Off", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
+ g_object_class_install_property (gobject_class, PROP_3A,
+ g_param_spec_boolean ("use-3a", "Use 3A",
+ "3A processsing is On or Off", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_object_class_install_property (gobject_class, PROP_AE_METERING_MODE,
g_param_spec_enum ("ae-metering-mode", "AE Metering Mode",
DEFAULT_PROP_AE_WINDOW,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS ));
-
- g_object_class_install_property (gobject_class, PROP_AF,
- g_param_spec_boolean ("af", "Auto Focus",
- "Auto Focus is On or Off", TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
-
g_object_class_install_property (gobject_class, PROP_AF_METERING_MODE,
g_param_spec_enum ("af-metering-mode", "AF Metering Mode",
"Select AF Metering Mode",
DEFAULT_PROP_AF_WINDOW,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS ));
- g_object_class_install_property (gobject_class, PROP_AWB,
- g_param_spec_boolean ("awb", "White Balance",
- "White Balance is On or Off",
- TRUE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
-
g_object_class_install_property (gobject_class, PROP_STILL_AF,
g_param_spec_boolean ("still-af", "still image slow focus",
"Turn On or Off slow focus when doing the still image capture",
"disable software crop on unsupported low resolution frame size", FALSE,
G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class,
+ PROP_DRAW_CORNER_MARKERS,
+ g_param_spec_boolean ("draw-corner-markers", "draw markers in frame corners",
+ "draw markers in frame corners", FALSE,
+ G_PARAM_READWRITE));
+
camera_class->is_open = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_is_open);
camera_class->open = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_open);
camera_class->close = GST_DEBUG_FUNCPTR (gst_v4l2camsrc_close);
// camera_class->set_vfinder_mode = GST_DEBUG_FUNCPTR(gst_v4l2camsrc_set_viewfinder_mode);
camera_class->set_autofocus =
GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_autofocus);
- camera_class->set_autoexposure =
- GST_DEBUG_FUNCPTR (gst_v4l2camsrc_set_autoexposure);
camera_class->read_settings =
GST_DEBUG_FUNCPTR (gst_v4l2camsrc_read_settings);
camera_class->write_settings =
gst_v4l2camsrc_mfldadvci_wrapper_unload (GstMFLDV4l2CamSrc * v4l2camsrc)
{
GstV4l2MFLDAdvCI *mfldadvci = v4l2camsrc->mfldadvci;
+
+ GST_DEBUG ("mfldadvci unload");
g_module_close (v4l2camsrc->module);
v4l2camsrc->module = NULL;
v4l2camsrc->sc_enabled = TRUE;
v4l2camsrc->cc_updated = FALSE;
v4l2camsrc->gamma_updated = FALSE;
- v4l2camsrc->ae_enabled = TRUE;
- v4l2camsrc->af_enabled = TRUE;
- v4l2camsrc->awb_enabled = TRUE;
+ v4l2camsrc->ae_awb_af_enabled = TRUE;
v4l2camsrc->still_af = FALSE;
v4l2camsrc->bayer_downscaling = FALSE;
v4l2camsrc->tone.gamma = 2.2;
v4l2camsrc->raw_output_size = 0;
v4l2camsrc->debug_flags = DEFAULT_DEBUG_FLAGS;
v4l2camsrc->disable_low_res_crop = FALSE;
+ v4l2camsrc->draw_corner_markers = FALSE;
v4l2camsrc->device_mutex = g_mutex_new ();
+ v4l2camsrc->caps_cache = NULL;
+
v4l2camsrc->mfldadvci = gst_v4l2camsrc_mfldadvci_wrapper_load (v4l2camsrc);
v4l2camsrc->input_sensor = GST_CAMERA_INPUT_SENSOR_PRIMARY;
GST_DEBUG ("initialized to commit %s", MFLD_V4L2CAMSRC_VERSION);
}
+/* GDestroyNotify for caps-cache entries: releases the held GstCaps
+ * reference (if any) and frees the malloc()ed entry itself.
+ * Tolerates NULL entries. */
+static void caps_cache_destroy_value(struct cached_gst_caps *entry)
+{
+  if (entry != NULL) {
+    if (entry->caps != NULL)
+      gst_caps_unref(entry->caps);
+    free(entry);
+  }
+}
static void
gst_v4l2camsrc_dispose (GObject * object)
{
GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (object);
+ if (v4l2camsrc->caps_cache)
+ g_slist_free_full(v4l2camsrc->caps_cache, (GDestroyNotify) caps_cache_destroy_value);
+ v4l2camsrc->caps_cache = NULL;
+
if (v4l2camsrc->formats) {
gst_v4l2camsrc_clear_format_list (v4l2camsrc);
}
G_OBJECT_CLASS (parent_class)->dispose (object);
libmfld_cam_dispose ();
+ gst_v4l2camsrc_mfldadvci_wrapper_unload(v4l2camsrc);
}
case PROP_USE_COPY:
v4l2camsrc->use_copy = g_value_get_boolean (value);
break;
- case PROP_AE:
- v4l2camsrc->ae_enabled = g_value_get_boolean (value);
+ case PROP_3A:
+ v4l2camsrc->ae_awb_af_enabled = g_value_get_boolean (value);
break;
case PROP_AE_WINDOW:
{
break;
}
- case PROP_AF:
- v4l2camsrc->af_enabled = g_value_get_boolean (value);
- break;
-
- case PROP_AWB:
- v4l2camsrc->awb_enabled = g_value_get_boolean (value);
- break;
case PROP_STILL_AF:
v4l2camsrc->still_af = g_value_get_boolean (value);
cam_set_autofocus (v4l2camsrc->still_af);
case PROP_DISABLE_LOW_RES_CROP:
v4l2camsrc->disable_low_res_crop = g_value_get_boolean (value);
break;
+ case PROP_DRAW_CORNER_MARKERS:
+ v4l2camsrc->draw_corner_markers = g_value_get_boolean (value);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
case PROP_USE_COPY:
g_value_set_boolean (value, v4l2camsrc->use_copy);
break;
- case PROP_AE:
- g_value_set_boolean (value, v4l2camsrc->ae_enabled);
+ case PROP_3A:
+ g_value_set_boolean (value, v4l2camsrc->ae_awb_af_enabled);
break;
case PROP_AE_METERING_MODE:
g_value_set_enum (value, v4l2camsrc->ae_metering_mode);
gst_structure_free(tmp);
break;
}
-
- case PROP_AF:
- g_value_set_boolean (value, v4l2camsrc->af_enabled);
- break;
case PROP_AF_METERING_MODE:
g_value_set_enum (value, v4l2camsrc->af_metering_mode);
break;
gst_structure_free(tmp);
break;
}
-
- case PROP_AWB:
- g_value_set_boolean (value, v4l2camsrc->awb_enabled);
- break;
case PROP_STILL_AF:
g_value_set_boolean (value, v4l2camsrc->still_af);
break;
case PROP_DISABLE_LOW_RES_CROP:
g_value_set_boolean (value, v4l2camsrc->disable_low_res_crop);
break;
+ case PROP_DRAW_CORNER_MARKERS:
+ g_value_set_boolean (value, v4l2camsrc->draw_corner_markers);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
gboolean is_vaapi_sharing;
};
+struct cached_gst_caps {
+ GstCameraInputSensor input;
+ GstCameraSrcCaptureMode mode;
+ guint32 pixelformat;
+ GstCaps *caps;
+};
+
/**
* GstMFLDV4l2CamSrc:
* @element: the parent element.
/* MFLD camera advanced features */
gboolean gdc_enabled, cac_enabled, ee_enabled, sc_enabled, dvs_enabled;
- gboolean rer_enabled, dis_enabled, ae_enabled, af_enabled, awb_enabled;
+ gboolean rer_enabled, dis_enabled, ae_awb_af_enabled;
gboolean blc_enabled, bpd_enabled;
gboolean caf_enabled;
GstCameraSrcDebugFlags debug_flags;
gboolean disable_low_res_crop;
+ gboolean draw_corner_markers;
+
+ GSList *caps_cache;
};
GST_DEBUG_CATEGORY_EXTERN (gst_v4l2camsrc_debug);
#define GST_CAT_DEFAULT gst_v4l2camsrc_debug
+extern guint gst_camerasrc_signals[CAMERA_IN_LAST_SIGNAL];
static const gint gst_v4l2camsrc_capture_map[] = {
CAM_VIEWFINDER_MODE_VIEWFINDER,
static GstV4l2Buffer *
gst_v4l2camsrc_buffer_new (GstMFLDV4l2CamSrcBufferPool * pool,
- GstMFLDV4l2CamSrc * v4l2camsrc, guint index, GstCaps * caps)
+ GstCameraSrc * camsrc, guint index, GstCaps * caps)
{
+ GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc);
GstV4l2Buffer *ret = NULL;
GstFlowReturn flow_ret;
+ GstBuffer *buf_cap_signal2 = NULL; /*output main buffer for capture signal*/
struct v4l2_buffer *vbuffer;
ret = (GstV4l2Buffer *) gst_mini_object_new (GST_TYPE_V4L2CAMSRC_BUFFER);
goto mmap_failed;
GST_BUFFER_DATA (ret) = (guint8 *) data;
} else {
- if (gst_pad_is_linked (GST_BASE_SRC_PAD (v4l2camsrc))) {
- GST_LOG ("using pad_alloc, size=%d", v4l2camsrc->frame_byte_size);
- GST_LOG ("ALLOC CAPS: %" GST_PTR_FORMAT, caps);
-
- flow_ret =
- gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (v4l2camsrc), 0LL,
- v4l2camsrc->frame_byte_size, caps, &ret->gbuffer);
- if (flow_ret != GST_FLOW_OK)
- goto pad_alloc_failed;
- GST_BUFFER_DATA (ret) = ret->gbuffer->data;
+
+ if (v4l2camsrc->capture_mode == GST_CAMERA_SRC_CAPTURE_MODE_STILL) {
+
+ /*call signal*/
+ /* alloc buffer for capture callback */
+ buf_cap_signal2 = gst_buffer_new ();
+
+ GST_BUFFER_DATA(buf_cap_signal2) = NULL;
+ GST_BUFFER_SIZE(buf_cap_signal2) = 0;
+ GST_BUFFER_CAPS(buf_cap_signal2) = gst_caps_new_simple("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, v4l2camsrc->capture_fourcc,
+ "width", G_TYPE_INT, v4l2camsrc->capture_w,
+ "height", G_TYPE_INT,v4l2camsrc->capture_h,
+ NULL);
+
+ GST_LOG_OBJECT (camsrc, "CALL: usrptr callback");
+ g_signal_emit( G_OBJECT (camsrc),
+ gst_camerasrc_signals[CAMERA_IN_SIGNAL_STILL_USRPTR_BUFFER],
+ 0,
+ buf_cap_signal2);
+
+ if (GST_BUFFER_DATA(buf_cap_signal2) == NULL)
+ goto usrptr_alloc_failed;
+
+ GST_LOG_OBJECT (camsrc, "RETURN: usrptr callback: buf=%p, size=%d",
+ GST_BUFFER_DATA(buf_cap_signal2), GST_BUFFER_SIZE(buf_cap_signal2));
+ GST_BUFFER_DATA (ret) = GST_BUFFER_DATA(buf_cap_signal2);
+ gst_buffer_unref(buf_cap_signal2);
+
} else {
- void *data;
+ if (gst_pad_is_linked (GST_BASE_SRC_PAD (v4l2camsrc))) {
+ GST_LOG ("using pad_alloc, size=%d", v4l2camsrc->frame_byte_size);
+ GST_LOG ("ALLOC CAPS: %" GST_PTR_FORMAT, caps);
+
+ flow_ret =
+ gst_pad_alloc_buffer_and_set_caps (GST_BASE_SRC_PAD (v4l2camsrc), 0LL,
+ v4l2camsrc->frame_byte_size, caps, &ret->gbuffer);
+ if (flow_ret != GST_FLOW_OK)
+ goto pad_alloc_failed;
+ GST_BUFFER_DATA (ret) = ret->gbuffer->data;
+ } else {
+ void *data;
- GST_LOG ("using posix_memalign");
- if (posix_memalign (&data, getpagesize (), vbuffer->length) != 0) {
- goto memalign_failed;
+ GST_LOG ("using posix_memalign");
+ if (posix_memalign (&data, getpagesize (), vbuffer->length) != 0) {
+ goto memalign_failed;
+ }
+ GST_BUFFER_DATA (ret) = (guint8 *) data;
}
- GST_BUFFER_DATA (ret) = (guint8 *) data;
}
}
GST_LOG (" field: %d", vbuffer->field);
GST_LOG (" memory: %d", vbuffer->memory);
if (vbuffer->memory == V4L2_MEMORY_MMAP)
- GST_LOG (" MMAP offset: %u", vbuffer->m.offset);
+ GST_LOG (" MMAP offset: %p", vbuffer->m.offset);
else if (vbuffer->memory == V4L2_MEMORY_USERPTR)
- GST_LOG (" user address: %u", vbuffer->m.userptr);
+ GST_LOG (" user address: %p", vbuffer->m.userptr);
GST_LOG (" length: %u", vbuffer->length);
GST_LOG (" input: %u", vbuffer->input);
#endif
g_free (vbuffer);
return NULL;
}
+usrptr_alloc_failed:
+ {
+ GST_WARNING ("Failed to alloc usrptr buffer");
+ g_free (vbuffer);
+ return NULL;
+ }
mmap_failed:
{
gint errnosave = errno;
/*
*/
static GstMFLDV4l2CamSrcBufferPool *
-gst_v4l2camsrc_buffer_pool_new (GstMFLDV4l2CamSrc * v4l2camsrc, gint fd,
+gst_v4l2camsrc_buffer_pool_new (GstCameraSrc * camsrc, gint fd,
GstCaps * caps)
{
+ GstMFLDV4l2CamSrc *v4l2camsrc = GST_V4L2CAMSRC (camsrc);
GstMFLDV4l2CamSrcBufferPool *pool;
gint n;
pool->queued = g_new0 (guint, pool->buffer_count);
for (n = 0; n < pool->buffer_count; n++) {
- pool->buffers[n] = gst_v4l2camsrc_buffer_new (pool, v4l2camsrc, n, caps);
+ pool->buffers[n] = gst_v4l2camsrc_buffer_new (pool, camsrc, n, caps);
GST_LOG ("buffer ref is %d", GST_MINI_OBJECT_REFCOUNT (pool->buffers[n]));
if (!pool->buffers[n])
goto buffer_new_failed;
g_mutex_lock (pool->lock);
pool->running = FALSE;
- pool->is_vaapi_sharing = FALSE;
g_mutex_unlock (pool->lock);
GST_DEBUG ("destroy pool");
gst_buffer_unref (buf);
}
}
-
+ pool->is_vaapi_sharing = FALSE;
gst_mini_object_unref (GST_MINI_OBJECT (pool));
}
gst_v4l2camsrc_format_get_rank (fa);
}
+/* Whitelist of pixel formats this element exposes: NV12 and YUV420 (I420)
+ * for YUV output, SGRBG10 (BA10) for raw bayer. Any other format reported
+ * by the driver is skipped during format probing. */
+static gboolean is_supported_pixelformat(guint32 fourcc)
+{
+  switch (fourcc) {
+    case V4L2_PIX_FMT_NV12:
+    case V4L2_PIX_FMT_YUV420:
+    case V4L2_PIX_FMT_SGRBG10:
+      return TRUE;
+    default:
+      return FALSE;
+  }
+}
+
/******************************************************
* gst_v4l2camsrc_fill_format_list():
* create list of supported capture formats
goto failed;
}
}
+ if(!is_supported_pixelformat(format->pixelformat)) {
+ GST_LOG_OBJECT(v4l2camsrc, " (skipping format %" GST_FOURCC_FORMAT ")",GST_FOURCC_ARGS (format->pixelformat));
+ continue;
+ }
GST_LOG_OBJECT (v4l2camsrc, "index: %u", format->index);
GST_LOG_OBJECT (v4l2camsrc, "type: %d", format->type);
return ret;
}
+/* Debug aid for the "draw-corner-markers" property: stamps small
+ * alternating black/white dot patterns into all four corners of the
+ * luma (Y) plane of an NV12 frame that is w pixels wide, h pixels high.
+ * NOTE(review): assumes the frame is large enough (roughly w >= 16 and
+ * h >= 16) that the corner patterns do not overlap — confirm against the
+ * minimum supported frame size. */
+static void draw_corner_markers_nv12(unsigned char *buf, unsigned w, unsigned h)
+{
+  int i,j;
+/* Write one pixel at (x,y) in the Y plane: 0x00 or 0xff by (x+y) parity,
+ * giving a checker pattern that stays visible through scaling/conversion. */
+#define put_dot(x,y) buf[(x)+w*(y)] = (((x+y)&0x01)?0x00:0xff)
+
+  for(j = 0; j < 4; ++j)
+    for(i = 0; i < 8; ++i) {
+      put_dot(i, j); put_dot(w-1-i, j);           /* top-left / top-right horizontal bars */
+      put_dot(j, i); put_dot(j, h-1-i);           /* left-edge vertical bars */
+      put_dot(i, h-1-j); put_dot(w-1-i, h-1-j);   /* bottom-left / bottom-right horizontal bars */
+      put_dot(w-1-j, i); put_dot(w-1-j, h-1-i);   /* right-edge vertical bars */
+    }
+#undef put_dot
+}
+
/* Crop to upper left corner of NV12 frame. */
static gboolean crop_buffer_inplace_nv12(unsigned char *buf, unsigned in_w, unsigned in_h,
unsigned out_w, unsigned out_h)
in += in_w;
out += out_w;
}
-#if 0
- /* B/W Marker to top left corner */
-#define put_dot(x,y,c) buf[(y)*out_w+x] = c
- put_dot(0,0,0xff); put_dot(1,0,0x00); put_dot(2,0,0xff); put_dot(3,0,0x00); put_dot(4,0,0xff); put_dot(5,0,0x00); put_dot(6,0,0xff); put_dot(7,0,0x00);
- put_dot(0,1,0x00); put_dot(1,1,0xff); put_dot(2,1,0x00); put_dot(3,1,0xff); put_dot(4,1,0x00); put_dot(5,1,0xff); put_dot(6,1,0x00); put_dot(7,1,0xff);
- put_dot(0,2,0xff); put_dot(1,2,0x00);
- put_dot(0,3,0x00); put_dot(1,3,0xff);
- put_dot(0,4,0xff); put_dot(1,4,0x00);
- put_dot(0,5,0x00); put_dot(1,5,0xff);
- put_dot(0,6,0xff); put_dot(1,6,0x00);
- put_dot(0,7,0x00); put_dot(1,7,0xff);
-#undef put_dot
-#endif
return TRUE;
}
struct cam_capture_settings st;
st.mmap = v4l2camsrc->use_mmap;
- st.ae = v4l2camsrc->ae_enabled;
- st.af = v4l2camsrc->af_enabled;
- st.awb = v4l2camsrc->awb_enabled;
+ st.ae_awb_af = v4l2camsrc->ae_awb_af_enabled;
st.dump_raw = v4l2camsrc->dump_raw;
st.raw_output_size = v4l2camsrc->raw_output_size;
st.dump_image = v4l2camsrc->dump_image;
err = cam_capture_frames (v4l2camsrc->video_fd, buffer, &st);
- if (v4l2camsrc->ae_enabled || v4l2camsrc->af_enabled ||
- v4l2camsrc->awb_enabled)
+ if (v4l2camsrc->ae_awb_af_enabled)
mfldcam_3a_start ();
ret = (err == CAM_ERR_NONE);
}
}
+ if(v4l2camsrc->draw_corner_markers)
+ draw_corner_markers_nv12(GST_BUFFER_DATA(pool_buffer),
+ v4l2camsrc->expected_capture_w, v4l2camsrc->expected_capture_h);
+
g_mutex_unlock (v4l2camsrc->pool->lock);
/* this can change at every frame, esp. with jpeg */
GST_LOG_OBJECT (v4l2camsrc, "initiating buffer pool");
if (!(v4l2camsrc->pool =
- gst_v4l2camsrc_buffer_pool_new (v4l2camsrc, fd, caps)))
+ gst_v4l2camsrc_buffer_pool_new (camsrc, fd, caps)))
goto buffer_pool_new_failed;
GST_INFO_OBJECT (v4l2camsrc, "capturing buffers");
{
GstMFLDV4l2CamSrc *v4l2camsrc;
gboolean ret = FALSE;
+ cam_err_t err = CAM_ERR_NONE;
v4l2camsrc = GST_V4L2CAMSRC (camsrc);
-
g_mutex_lock (v4l2camsrc->device_mutex);
-
- cam_err_t err = CAM_ERR_NONE;
-
err= cam_set_autofocus(on);
- v4l2camsrc->af_enabled = on;
-
ret = (err == CAM_ERR_NONE);
-
g_mutex_unlock (v4l2camsrc->device_mutex);
GST_DEBUG ("setting autofocus: %s", ret ? "ok" : "failed");
-
- return ret;
-}
-
-/*
- *
- */
-gboolean
-gst_v4l2camsrc_set_autoexposure (GstCameraSrc * camsrc, gboolean on)
-{
- GstMFLDV4l2CamSrc *v4l2camsrc;
- gboolean ret = FALSE;
- gchar *mode;
-
- v4l2camsrc = GST_V4L2CAMSRC (camsrc);
- mode = on ? "ON" : "OFF";
-
- GST_DEBUG_OBJECT (v4l2camsrc, "setting autoexposure: %s", mode);
-
-
- g_mutex_lock (v4l2camsrc->device_mutex);
-
- cam_err_t err = CAM_ERR_NONE;
-
- err = cam_set_autoexposure(v4l2camsrc->video_fd, on);
- v4l2camsrc->ae_enabled = on;
-
- ret = (err == CAM_ERR_NONE);
- g_mutex_unlock (v4l2camsrc->device_mutex);
-
- GST_DEBUG_OBJECT (v4l2camsrc, "Setting autoexposure %s: %s", mode,
- ret ? "SUCCESS" : "FAILED");
-
return ret;
}
-
/*
*
*/
gst_v4l2camsrc_libmfldcam_init (GstMFLDV4l2CamSrc * v4l2camsrc)
{
cam_err_t err;
+ struct v4l2_input input;
+ int ret;
+ char *name, *space;
+
+ memset(&input, 0, sizeof(input));
if (v4l2camsrc->input_sensor == GST_CAMERA_INPUT_SENSOR_PRIMARY)
- err = cam_driver_init (v4l2camsrc->video_fd, "mt9e013");
+ input.index = V2L2_CAMERA_INPUT_SENSOR_PRIMARY;
else
- err = cam_driver_init (v4l2camsrc->video_fd, "mt9m114");
+ input.index = V2L2_CAMERA_INPUT_SENSOR_SECONDARY;
+
+ ret = ioctl(v4l2camsrc->video_fd, VIDIOC_ENUMINPUT, &input);
+ if (ret < 0) {
+ return FALSE;
+ }
+ name = (char*)input.name;
+ space = strchr(name, ' ');
+ if (space > name) {
+ name[space - name] = '\0';
+ }
+ GST_DEBUG_OBJECT (v4l2camsrc, "sensor name %s", name);
+
+ err = cam_driver_init (v4l2camsrc->video_fd, name);
if (err != CAM_ERR_NONE) {
GST_WARNING ("libmfldcam initialization failed");
v4l2camsrc->videodev);
gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_VFLIP,v4l2camsrc->vflip);
- gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_HFLIP,v4l2camsrc->hflip);
+ if (v4l2camsrc->input_sensor == GST_CAMERA_INPUT_SENSOR_SECONDARY)
+ gst_v4l2camsrc_set_attribute (GST_CAMERA_SRC (v4l2camsrc),V4L2_CID_HFLIP,v4l2camsrc->hflip);
v4l2camsrc->initialized = TRUE;
v4l2camsrc->is_open = TRUE;
+* Fri Mar 01 2013 Jussi Saavalainen <jussi.saavalainen@ixonos.com> submit/trunk/20130219.070004@140594f
+- Use correct data type for BA10 in static caps
+- combine ae/af/awb enable/disable into one 3a property.
+- Cache and reuse probed caps
+- use VIDIOC_ENUMINPUT to get camera sensor name.
+- add static pad template for caps, dropping unneeded pixelformats.
+- Buffer sharing implementation for still image capture TZSP-4953
+- remove extra frame skips from startup.
+- initialise horizontal flip only for the secondary camera.
+- unload 3a in dispose.
+- buffer-sharing: fix bug of vaapi buffer leak
+- Skip unneeded pixelformats during probe
+- Make buffer address logging consistent
+- Add option to draw markers on frame corners
+- remove obsolete header.
+
* Fri Feb 01 2013 Jussi Saavalainen <jussi.saavalainen@ixonos.com> accepted/tizen_2.0/20130128.180320@3e02944
- Remove low-res frame ISP padding with software crop. TZSP-4269
Name: gst-plugins-atomisp
Summary: Camera source component for Intel Medfield ISP
-Version: 0.52
+Version: 0.53
Release: 1
Group: Applications/Multimedia
License: LGPLv2+