want_generic="yes"
case "$host_os" in
- mingw* | cegcc*)
+ mingw*)
want_xine="no"
want_gstreamer="yes"
+ want_generic="no"
want_generic_vlc="no"
;;
*)
### Checks for header files
AC_HEADER_STDC
+AC_CHECK_HEADERS([unistd.h])
+
### Checks for types
-#include "config.h"
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
-#include "emotion_private.h"
-#include "Emotion.h"
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <stdio.h>
#ifdef EMOTION_HAVE_EEZE
-#include <sys/types.h>
-#include <sys/stat.h>
-#include <fcntl.h>
+# include <sys/types.h>
+# include <sys/stat.h>
+# include <fcntl.h>
# include <sys/ioctl.h>
# ifdef HAVE_V4L2
# include <linux/videodev2.h>
#include <Eet.h>
+#include "Emotion.h"
+#include "emotion_private.h"
+
EAPI int EMOTION_WEBCAM_UPDATE = 0;
struct ext_match_s
#ifndef EMOTION_PRIVATE_H
#define EMOTION_PRIVATE_H
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-
-#include <Evas.h>
-#include <Ecore.h>
-#include <Emotion.h>
-
#define META_TRACK_TITLE 1
#define META_TRACK_ARTIST 2
#define META_TRACK_GENRE 3
-#include "emotion_private.h"
-#include "Emotion.h"
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <Evas.h>
+#include <Ecore.h>
#ifdef HAVE_EIO
# include <math.h>
# include <Eio.h>
#endif
+#include "Emotion.h"
+#include "emotion_private.h"
+
+#ifdef _WIN32
+# define FMT_UCHAR "%c"
+#else
+# define FMT_UCHAR "%hhu"
+#endif
+
#define E_SMART_OBJ_GET(smart, o, type) \
{ \
char *_e_smart_str; \
double ir;
double r;
- int aspect_opt;
+ int aspect_opt = 0;
iw = sd->video.w;
ih = sd->video.h;
Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
- DBG("play=%hhu, was=%hhu", play, sd->play);
+ DBG("play=" FMT_UCHAR ", was=" FMT_UCHAR, play, sd->play);
if (play == sd->play) return;
if (!sd->module) return;
if (!sd->video_data) return;
Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
- DBG("mute=%hhu", mute);
+ DBG("mute=" FMT_UCHAR, mute);
if (!sd->module) return;
if (!sd->video_data) return;
sd->module->audio_channel_mute_set(sd->video_data, mute);
Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
- DBG("mute=%hhu", mute);
+ DBG("mute=" FMT_UCHAR, mute);
if (!sd->module) return;
if (!sd->video_data) return;
sd->module->video_channel_mute_set(sd->video_data, mute);
Smart_Data *sd;
E_SMART_OBJ_GET(sd, obj, E_OBJ_NAME);
- DBG("mute=%hhu", mute);
+ DBG("mute=" FMT_UCHAR, mute);
if (!sd->module) return;
if (!sd->video_data) return;
sd->module->spu_channel_mute_set(sd->video_data, mute);
#ifndef EMOTION_GENERIC_PLUGIN_H
#define EMOTION_GENERIC_PLUGIN_H
-#include <semaphore.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
+#ifdef _WIN32
+# include <windows.h>
+#else
+#include <semaphore.h>
+#endif
+
#define DEFAULTWIDTH 320
#define DEFAULTHEIGHT 240
#define DEFAULTPITCH 4
int last;
int next;
} frame;
+ /* FIXME: maybe abstract this locking primitive in Eina? */
+#ifdef _WIN32
+ HANDLE lock;
+#else
sem_t lock;
+#endif
int frame_drop;
};
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
+
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/types.h>
#include <fcntl.h>
#include <unistd.h>
+#include <errno.h>
+
#include <Eina.h>
#include <Evas.h>
+#include <Ecore.h>
#include "Emotion.h"
#include "emotion_private.h"
vs = mmap(NULL, size, PROT_READ|PROT_WRITE, MAP_SHARED, shmfd, 0);
if (vs == MAP_FAILED)
{
- ERR("error when mapping shared memory.\n");
+ ERR("error when mapping shared memory");
return EINA_FALSE;
}
vs->frame.last = 2;
vs->frame.next = 2;
vs->frame_drop = 0;
+#ifdef _WIN32
+ /* FIXME: maximum count for the semaphore: 10. Is it sufficient? */
+ vs->lock = CreateSemaphore(NULL, 1, 10, NULL);
+ if (!vs->lock)
+ {
+ ERR("can not create semaphore");
+ return EINA_FALSE;
+ }
+#else
sem_init(&vs->lock, 1, 1);
+#endif
ev->frame.frames[0] = (unsigned char *)vs + sizeof(*vs);
ev->frame.frames[1] = (unsigned char *)vs + sizeof(*vs) + vs->height * vs->width * vs->pitch;
ev->frame.frames[2] = (unsigned char *)vs + sizeof(*vs) + 2 * vs->height * vs->width * vs->pitch;
_player_file_closed(Emotion_Generic_Video *ev)
{
INF("Closed previous file.");
+#ifdef _WIN32
+ CloseHandle(ev->shared->lock);
+#else
sem_destroy(&ev->shared->lock);
+#endif
ev->closing = EINA_FALSE;
ECORE_EXE_PIPE_READ_LINE_BUFFERED | ECORE_EXE_NOT_LEADER,
ev);
- INF("created pipe emotion -> player: %d -> %d\n", pipe_out[1], pipe_out[0]);
- INF("created pipe player -> emotion: %d -> %d\n", pipe_in[1], pipe_in[0]);
+ INF("created pipe emotion -> player: %d -> %d", pipe_out[1], pipe_out[0]);
+ INF("created pipe player -> emotion: %d -> %d", pipe_in[1], pipe_in[0]);
close(pipe_in[1]);
close(pipe_out[0]);
em_bgra_data_get(void *data, unsigned char **bgra_data)
{
Emotion_Generic_Video *ev = data;
+#ifdef _WIN32
+ DWORD res;
+#endif
if (!ev || !ev->file_ready)
return 0;
// lock frame here
+#ifdef _WIN32
+ res = WaitForSingleObject(ev->shared->lock, 0L);
+ if (res != WAIT_OBJECT_0)
+ return 0;
+#else
sem_wait(&ev->shared->lock);
+#endif
// send current frame to emotion
if (ev->shared->frame.emotion != ev->shared->frame.last)
ev->shared->frame_drop = 0;
// unlock frame here
+#ifdef _WIN32
+ ReleaseSemaphore(ev->shared->lock, 1, NULL);
+#else
sem_post(&ev->shared->lock);
+#endif
ev->drop = 0;
return 1;
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <Eina.h>
+#include <Evas.h>
+#include <Ecore.h>
+
#include <glib.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideosink.h>
-#include <Ecore.h>
-
+#include "Emotion.h"
#include "emotion_gstreamer.h"
Emotion_Gstreamer_Buffer *
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <Eina.h>
+#include <Evas.h>
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideosink.h>
+
+#include "Emotion.h"
#include "emotion_gstreamer.h"
static inline void
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <Eina.h>
+#include <Evas.h>
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstvideosink.h>
+
+#include "Emotion.h"
#include "emotion_gstreamer.h"
typedef struct _FakeEOSBin
-#include <unistd.h>
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#ifdef HAVE_UNISTD_H
+# include <unistd.h>
+#endif
#include <fcntl.h>
#include <Eina.h>
+#include <Evas.h>
+#include <Ecore.h>
+
+#define HTTP_STREAM 0
+#define RTSP_STREAM 1
+#include <glib.h>
+#include <gst/gst.h>
+#include <glib-object.h>
+#include <gst/video/gstvideosink.h>
+#include <gst/video/video.h>
+#ifdef HAVE_ECORE_X
+# include <Ecore_X.h>
+# include <Ecore_Evas.h>
+# ifdef HAVE_XOVERLAY_H
+# include <gst/interfaces/xoverlay.h>
+# endif
+#endif
+
+#include "Emotion.h"
#include "emotion_private.h"
#include "emotion_gstreamer.h"
-#include "Emotion.h"
Eina_Bool window_manager_video = EINA_FALSE;
int _emotion_gstreamer_log_domain = -1;
if (ev->xvpad) gst_object_unref(ev->xvpad);
ev->xvpad = NULL;
+#ifdef HAVE_ECORE_X
fprintf(stderr, "destroying window: %i\n", ev->win);
if (ev->win) ecore_x_window_free(ev->win);
ev->win = 0;
+#endif
}
EINA_LIST_FREE(ev->audio_streams, astream)
return ev->stream;
}
+#ifdef HAVE_ECORE_X
static Eina_Bool
_ecore_event_x_destroy(void *data __UNUSED__, int type __UNUSED__, void *event __UNUSED__)
{
return EINA_TRUE;
}
+#endif
static Eina_Bool
module_open(Evas_Object *obj,
if (!em_module.init(obj, video, opt))
return EINA_FALSE;
+#ifdef HAVE_ECORE_X
ecore_event_handler_add(ECORE_X_EVENT_WINDOW_DESTROY, _ecore_event_x_destroy, NULL);
+#endif
if (getenv("EMOTION_FPS_DEBUG")) debug_fps = EINA_TRUE;
#ifndef __EMOTION_GSTREAMER_H__
#define __EMOTION_GSTREAMER_H__
-#ifdef HAVE_CONFIG_H
-# include "config.h"
-#endif
-
-#include <Evas.h>
-#include <Ecore.h>
-
-#ifdef HAVE_ECORE_X
-# include <Ecore_X.h>
-# include <Ecore_Evas.h>
-# ifdef HAVE_XOVERLAY_H
-# include <gst/interfaces/xoverlay.h>
-# endif
-#endif
-
-#define HTTP_STREAM 0
-#define RTSP_STREAM 1
-#include <glib.h>
-#include <gst/gst.h>
-#include <glib-object.h>
-#include <gst/video/gstvideosink.h>
-#include <gst/video/video.h>
-
-#include "emotion_private.h"
-
typedef void (*Evas_Video_Convert_Cb)(unsigned char *evas_data,
const unsigned char *gst_data,
unsigned int w,
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <Eina.h>
+#include <Evas.h>
+#include <Ecore.h>
+
+#define HTTP_STREAM 0
+#define RTSP_STREAM 1
+#include <glib.h>
+#include <gst/gst.h>
+#include <glib-object.h>
+#include <gst/video/gstvideosink.h>
+#include <gst/video/video.h>
+
+#include "Emotion.h"
+#include "emotion_private.h"
#include "emotion_gstreamer.h"
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE("sink",
_video_resize(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
Evas_Coord w, Evas_Coord h)
{
+#ifdef HAVE_ECORE_X
Emotion_Gstreamer_Video *ev = data;
-#ifdef HAVE_ECORE_X
ecore_x_window_resize(ev->win, w, h);
#endif
fprintf(stderr, "resize: %i, %i\n", w, h);
_video_move(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__,
Evas_Coord x, Evas_Coord y)
{
- Emotion_Gstreamer_Video *ev = data;
#ifdef HAVE_ECORE_X
+ Emotion_Gstreamer_Video *ev = data;
unsigned int pos[2];
fprintf(stderr, "move: %i, %i\n", x, y);
static void
_video_show(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
{
+#ifdef HAVE_ECORE_X
Emotion_Gstreamer_Video *ev = data;
fprintf(stderr, "show xv\n");
-#ifdef HAVE_ECORE_X
ecore_x_window_show(ev->win);
#endif
/* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_link_cb, ev); */
static void
_video_hide(void *data, Evas_Object *obj __UNUSED__, const Evas_Video_Surface *surface __UNUSED__)
{
+#ifdef HAVE_ECORE_X
Emotion_Gstreamer_Video *ev = data;
fprintf(stderr, "hide xv\n");
-#ifdef HAVE_ECORE_X
ecore_x_window_hide(ev->win);
#endif
/* gst_pad_set_blocked_async(ev->teepad, TRUE, _block_pad_unlink_cb, ev); */