}
static void
-video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info)
+video_obj_time_changed(Evas_Object *obj, Evas_Object *edje)
{
- Evas_Object *oe;
- double pos, len;
+ double pos, len, scale;
char buf[256];
int ph, pm, ps, pf, lh, lm, ls;
- oe = data;
pos = emotion_object_position_get(obj);
len = emotion_object_play_length_get(obj);
// printf("%3.3f, %3.3f\n", pos, len);
- edje_object_part_drag_value_set(oe, "video_progress", pos / len, 0.0);
+ scale = (len > 0.0) ? pos / len : 0.0;
+ edje_object_part_drag_value_set(edje, "video_progress", scale, 0.0);
lh = len / 3600;
lm = len / 60 - (lh * 60);
ls = len - (lm * 60);
pf = pos * 100 - (ps * 100) - (pm * 60 * 100) - (ph * 60 * 60 * 100);
snprintf(buf, sizeof(buf), "%i:%02i:%02i.%02i / %i:%02i:%02i",
ph, pm, ps, pf, lh, lm, ls);
- edje_object_part_text_set(oe, "video_progress_txt", buf);
+ edje_object_part_text_set(edje, "video_progress_txt", buf);
+}
+
+static void
+video_obj_frame_decode_cb(void *data, Evas_Object *obj, void *event_info)
+{
+ video_obj_time_changed(obj, data);
if (0)
{
static void
video_obj_length_change_cb(void *data, Evas_Object *obj, void *event_info)
{
- Evas_Object *oe;
- double pos, len;
- char buf[256];
- int ph, pm, ps, pf, lh, lm, ls;
+ video_obj_time_changed(obj, data);
+}
- oe = data;
- pos = emotion_object_position_get(obj);
- len = emotion_object_play_length_get(obj);
- edje_object_part_drag_value_set(oe, "video_progress", pos / len, 0.0);
- lh = len / 3600;
- lm = len / 60 - (lh * 60);
- ls = len - (lm * 60);
- ph = pos / 3600;
- pm = pos / 60 - (ph * 60);
- ps = pos - (pm * 60);
- pf = pos * 100 - (ps * 100) - (pm * 60 * 100) - (ph * 60 * 60 * 100);
- snprintf(buf, sizeof(buf), "%i:%02i:%02i.%02i / %i:%02i:%02i",
- ph, pm, ps, pf, lh, lm, ls);
- edje_object_part_text_set(oe, "video_progress_txt", buf);
+static void
+video_obj_position_update_cb(void *data, Evas_Object *obj, void *event_info)
+{
+ video_obj_time_changed(obj, data);
}
static void
evas_object_smart_callback_add(o, "frame_decode", video_obj_frame_decode_cb, oe);
evas_object_smart_callback_add(o, "frame_resize", video_obj_frame_resize_cb, oe);
evas_object_smart_callback_add(o, "length_change", video_obj_length_change_cb, oe);
+ evas_object_smart_callback_add(o, "position_update", video_obj_position_update_cb, oe);
evas_object_smart_callback_add(o, "decode_stop", video_obj_stopped_cb, oe);
evas_object_smart_callback_add(o, "channels_change", video_obj_channels_cb, oe);
return 1;
}
+static int
+check_positions(void *data)
+{
+ const Evas_List *lst;
+
+ for (lst = video_objs; lst != NULL; lst = lst->next)
+ video_obj_time_changed(lst->data, evas_object_smart_parent_get(lst->data));
+
+ return !!video_objs;
+}
+
int
main(int argc, char **argv)
{
}
ecore_idle_enterer_add(enter_idle, NULL);
+ ecore_animator_add(check_positions, NULL);
ecore_main_loop_begin();
main_stop();
em_len_get(void *video)
{
Emotion_Gstreamer_Video *ev;
- Emotion_Video_Sink *vsink;
+ Emotion_Video_Sink *vsink;
+ Emotion_Audio_Sink *asink;
+ GstFormat fmt;
+ gint64 val;
+ gboolean ret;
+
+ ev = video;
+ fmt = GST_FORMAT_TIME;
+ ret = gst_element_query_duration(ev->pipeline, &fmt, &val);
+ if (!ret)
+ goto fallback;
+
+ if (fmt != GST_FORMAT_TIME)
+ {
+	fprintf(stderr, "requested duration in time, but got %s instead.\n",
+		gst_format_get_name(fmt));
+ goto fallback;
+ }
- ev = (Emotion_Gstreamer_Video *)video;
+ if (val <= 0.0)
+ goto fallback;
- vsink = (Emotion_Video_Sink *)ecore_list_index_goto(ev->video_sinks, ev->video_sink_nbr);
- if (vsink)
- return (double)vsink->length_time;
+ return val / 1000000000.0;
+
+ fallback:
+ fputs("Gstreamer reported no length, try existing sinks...\n", stderr);
+
+ ecore_list_first_goto(ev->audio_sinks);
+ while ((asink = ecore_list_next(ev->audio_sinks)) != NULL)
+ if (asink->length_time >= 0)
+ return asink->length_time;
+
+ ecore_list_first_goto(ev->video_sinks);
+ while ((vsink = ecore_list_next(ev->video_sinks)) != NULL)
+ if (vsink->length_time >= 0)
+ return vsink->length_time;
return 0.0;
}
em_pos_get(void *video)
{
Emotion_Gstreamer_Video *ev;
+ GstFormat fmt;
+ gint64 val;
+ gboolean ret;
- ev = (Emotion_Gstreamer_Video *)video;
+ ev = video;
+ fmt = GST_FORMAT_TIME;
+ ret = gst_element_query_position(ev->pipeline, &fmt, &val);
+ if (!ret)
+ return ev->position;
+
+ if (fmt != GST_FORMAT_TIME)
+ {
+	fprintf(stderr, "requested position in time, but got %s instead.\n",
+		gst_format_get_name(fmt));
+ return ev->position;
+ }
+ ev->position = val / 1000000000.0;
return ev->position;
}