diff options
author | Wim Taymans <wim.taymans@gmail.com> | 2004-05-28 18:01:34 +0000 |
---|---|---|
committer | Wim Taymans <wim.taymans@gmail.com> | 2004-05-28 18:01:34 +0000 |
commit | 8b7c3ac223c33e03d34daddc2a6f4dd795d00f34 (patch) | |
tree | 458b6986b7b567290190919a62eceb66181a80b1 | |
parent | 3837845a3b7ce5075c987f629a1c89155b9a94a7 (diff) |
A plugin to add an alpha channel to I420 video. Can optionally do chroma keying.
Original commit message from CVS:
* configure.ac:
* gst/alpha/Makefile.am:
* gst/alpha/gstalpha.c: (gst_alpha_method_get_type),
(gst_alpha_get_type), (gst_alpha_base_init),
(gst_alpha_class_init), (gst_alpha_init), (gst_alpha_set_property),
(gst_alpha_get_property), (gst_alpha_sink_link), (gst_alpha_add),
(gst_alpha_chroma_key), (gst_alpha_chain),
(gst_alpha_change_state), (plugin_init):
A plugin to add an alpha channel to I420 video. Can optionally do
chroma keying.
* gst/multipart/Makefile.am:
* gst/multipart/multipart.c: (plugin_init):
* gst/multipart/multipartdemux.c: (gst_multipart_demux_base_init),
(gst_multipart_demux_class_init), (gst_multipart_demux_init),
(gst_multipart_demux_finalize), (gst_multipart_demux_handle_event),
(gst_multipart_find_pad_by_mime), (gst_multipart_demux_chain),
(gst_multipart_demux_change_state),
(gst_multipart_demux_plugin_init):
* gst/multipart/multipartmux.c: (gst_multipart_mux_get_type),
(gst_multipart_mux_base_init), (gst_multipart_mux_class_init),
(gst_multipart_mux_get_sink_event_masks), (gst_multipart_mux_init),
(gst_multipart_mux_sinkconnect), (gst_multipart_mux_pad_link),
(gst_multipart_mux_pad_unlink),
(gst_multipart_mux_request_new_pad),
(gst_multipart_mux_handle_src_event),
(gst_multipart_mux_next_buffer), (gst_multipart_mux_compare_pads),
(gst_multipart_mux_queue_pads), (gst_multipart_mux_loop),
(gst_multipart_mux_get_property), (gst_multipart_mux_set_property),
(gst_multipart_mux_change_state), (gst_multipart_mux_plugin_init):
A Multipart demuxer/muxer. Not sure if it violates specs. Used to
send multipart jpeg images to a browser.
* gst/videobox/Makefile.am:
* gst/videobox/README:
* gst/videobox/gstvideobox.c: (gst_video_box_fill_get_type),
(gst_video_box_get_type), (gst_video_box_base_init),
(gst_video_box_class_init), (gst_video_box_init),
(gst_video_box_set_property), (gst_video_box_get_property),
(gst_video_box_sink_link), (gst_video_box_i420),
(gst_video_box_ayuv), (gst_video_box_chain),
(gst_video_box_change_state), (plugin_init):
Crops or adds borders around an image. Can do alpha channel
borders as well.
* gst/videomixer/Makefile.am:
* gst/videomixer/README:
* gst/videomixer/videomixer.c: (gst_videomixer_pad_get_type),
(gst_videomixer_pad_base_init), (gst_videomixer_pad_class_init),
(gst_videomixer_pad_get_sink_event_masks),
(gst_videomixer_pad_get_property),
(gst_videomixer_pad_set_property),
(gst_videomixer_pad_sinkconnect), (gst_videomixer_pad_link),
(gst_videomixer_pad_unlink), (gst_videomixer_pad_init),
(gst_video_mixer_background_get_type), (gst_videomixer_get_type),
(gst_videomixer_base_init), (gst_videomixer_class_init),
(gst_videomixer_init), (gst_videomixer_request_new_pad),
(gst_videomixer_handle_src_event),
(gst_videomixer_blend_ayuv_i420), (gst_videomixer_fill_checker),
(gst_videomixer_fill_color), (gst_videomixer_fill_queues),
(gst_videomixer_blend_buffers), (gst_videomixer_update_queues),
(gst_videomixer_loop), (gst_videomixer_get_property),
(gst_videomixer_set_property), (gst_videomixer_change_state),
(plugin_init):
Generic video mixer plugin, can handle multiple inputs all with
different framerates and video sizes. Is fully alpha channel
aware.
-rw-r--r-- | ChangeLog | 67 | ||||
-rw-r--r-- | configure.ac | 8 | ||||
-rw-r--r-- | gst/alpha/Makefile.am | 9 | ||||
-rw-r--r-- | gst/alpha/gstalpha.c | 558 | ||||
-rw-r--r-- | gst/multipart/Makefile.am | 9 | ||||
-rw-r--r-- | gst/multipart/multipart.c | 44 | ||||
-rw-r--r-- | gst/multipart/multipartdemux.c | 374 | ||||
-rw-r--r-- | gst/multipart/multipartmux.c | 593 | ||||
-rw-r--r-- | gst/videobox/Makefile.am | 9 | ||||
-rw-r--r-- | gst/videobox/README | 21 | ||||
-rw-r--r-- | gst/videobox/gstvideobox.c | 734 | ||||
-rw-r--r-- | gst/videomixer/Makefile.am | 9 | ||||
-rw-r--r-- | gst/videomixer/README | 27 | ||||
-rw-r--r-- | gst/videomixer/videomixer.c | 1116 |
14 files changed, 3578 insertions, 0 deletions
@@ -1,3 +1,70 @@ +2004-05-28 Wim Taymans <wim@fluendo.com> + + * configure.ac: + * gst/alpha/Makefile.am: + * gst/alpha/gstalpha.c: (gst_alpha_method_get_type), + (gst_alpha_get_type), (gst_alpha_base_init), + (gst_alpha_class_init), (gst_alpha_init), (gst_alpha_set_property), + (gst_alpha_get_property), (gst_alpha_sink_link), (gst_alpha_add), + (gst_alpha_chroma_key), (gst_alpha_chain), + (gst_alpha_change_state), (plugin_init): + A plugin to add an alpha channel to I420 video. Can optionally do + chroma keying. + * gst/multipart/Makefile.am: + * gst/multipart/multipart.c: (plugin_init): + * gst/multipart/multipartdemux.c: (gst_multipart_demux_base_init), + (gst_multipart_demux_class_init), (gst_multipart_demux_init), + (gst_multipart_demux_finalize), (gst_multipart_demux_handle_event), + (gst_multipart_find_pad_by_mime), (gst_multipart_demux_chain), + (gst_multipart_demux_change_state), + (gst_multipart_demux_plugin_init): + * gst/multipart/multipartmux.c: (gst_multipart_mux_get_type), + (gst_multipart_mux_base_init), (gst_multipart_mux_class_init), + (gst_multipart_mux_get_sink_event_masks), (gst_multipart_mux_init), + (gst_multipart_mux_sinkconnect), (gst_multipart_mux_pad_link), + (gst_multipart_mux_pad_unlink), + (gst_multipart_mux_request_new_pad), + (gst_multipart_mux_handle_src_event), + (gst_multipart_mux_next_buffer), (gst_multipart_mux_compare_pads), + (gst_multipart_mux_queue_pads), (gst_multipart_mux_loop), + (gst_multipart_mux_get_property), (gst_multipart_mux_set_property), + (gst_multipart_mux_change_state), (gst_multipart_mux_plugin_init): + A Multipart demuxer/muxer. Not sure if it violates specs. Used to + send multipart jpeg images to a browser. 
+ * gst/videobox/Makefile.am: + * gst/videobox/README: + * gst/videobox/gstvideobox.c: (gst_video_box_fill_get_type), + (gst_video_box_get_type), (gst_video_box_base_init), + (gst_video_box_class_init), (gst_video_box_init), + (gst_video_box_set_property), (gst_video_box_get_property), + (gst_video_box_sink_link), (gst_video_box_i420), + (gst_video_box_ayuv), (gst_video_box_chain), + (gst_video_box_change_state), (plugin_init): + Crops or adds borders around an image. can do alpha channel + borders as well. + * gst/videomixer/Makefile.am: + * gst/videomixer/README: + * gst/videomixer/videomixer.c: (gst_videomixer_pad_get_type), + (gst_videomixer_pad_base_init), (gst_videomixer_pad_class_init), + (gst_videomixer_pad_get_sink_event_masks), + (gst_videomixer_pad_get_property), + (gst_videomixer_pad_set_property), + (gst_videomixer_pad_sinkconnect), (gst_videomixer_pad_link), + (gst_videomixer_pad_unlink), (gst_videomixer_pad_init), + (gst_video_mixer_background_get_type), (gst_videomixer_get_type), + (gst_videomixer_base_init), (gst_videomixer_class_init), + (gst_videomixer_init), (gst_videomixer_request_new_pad), + (gst_videomixer_handle_src_event), + (gst_videomixer_blend_ayuv_i420), (gst_videomixer_fill_checker), + (gst_videomixer_fill_color), (gst_videomixer_fill_queues), + (gst_videomixer_blend_buffers), (gst_videomixer_update_queues), + (gst_videomixer_loop), (gst_videomixer_get_property), + (gst_videomixer_set_property), (gst_videomixer_change_state), + (plugin_init): + Generic video mixer plugin, can handle multiple inputs all with + different framerates and video sizes. Is fully alpha channel + aware. 
+ 2004-05-27 Ronald Bultje <rbultje@ronald.bitfreak.net> * ext/alsa/gstalsamixer.c: (gst_alsa_mixer_build_list): diff --git a/configure.ac b/configure.ac index b242d44d..9ea90989 100644 --- a/configure.ac +++ b/configure.ac @@ -336,6 +336,7 @@ dnl these are all the gst plug-ins, compilable without additional libs GST_PLUGINS_ALL="\ ac3parse \ adder \ + alpha \ asfdemux \ audioconvert \ audioscale \ @@ -366,6 +367,7 @@ GST_PLUGINS_ALL="\ mpegaudioparse \ mpegstream \ monoscope \ + multipart \ overlay \ passthrough \ playondemand \ @@ -387,10 +389,12 @@ GST_PLUGINS_ALL="\ typefind \ udp \ vbidec \ + videobox \ videocrop \ videodrop \ videoflip \ videofilter \ + videomixer \ videoscale \ videotestsrc \ volenv \ @@ -1742,6 +1746,7 @@ gst-plugins.spec gst/Makefile gst/ac3parse/Makefile gst/adder/Makefile +gst/alpha/Makefile gst/audioconvert/Makefile gst/audioscale/Makefile gst/auparse/Makefile @@ -1774,6 +1779,7 @@ gst/mpegstream/Makefile gst/modplug/Makefile gst/modplug/libmodplug/Makefile gst/monoscope/Makefile +gst/multipart/Makefile gst/overlay/Makefile gst/passthrough/Makefile gst/playondemand/Makefile @@ -1795,10 +1801,12 @@ gst/tcp/Makefile gst/typefind/Makefile gst/udp/Makefile gst/vbidec/Makefile +gst/videobox/Makefile gst/videocrop/Makefile gst/videodrop/Makefile gst/videofilter/Makefile gst/videoflip/Makefile +gst/videomixer/Makefile gst/videoscale/Makefile gst/videotestsrc/Makefile gst/volenv/Makefile diff --git a/gst/alpha/Makefile.am b/gst/alpha/Makefile.am new file mode 100644 index 00000000..cd908f9d --- /dev/null +++ b/gst/alpha/Makefile.am @@ -0,0 +1,9 @@ + +plugin_LTLIBRARIES = libgstalpha.la + +libgstalpha_la_SOURCES = gstalpha.c +libgstalpha_la_CFLAGS = $(GST_CFLAGS) +libgstalpha_la_LIBADD = +libgstalpha_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) + +noinst_HEADERS = diff --git a/gst/alpha/gstalpha.c b/gst/alpha/gstalpha.c new file mode 100644 index 00000000..ece724e9 --- /dev/null +++ b/gst/alpha/gstalpha.c @@ -0,0 +1,558 @@ +/* GStreamer + * Copyright (C) 
<1999> Erik Walthinsen <omega@cse.ogi.edu> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include <gst/gst.h> +#include <gst/video/video.h> + +#include <string.h> + +#define GST_TYPE_ALPHA \ + (gst_alpha_get_type()) +#define GST_ALPHA(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ALPHA,GstAlpha)) +#define GST_ALPHA_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ALPHA,GstAlphaClass)) +#define GST_IS_ALPHA(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ALPHA)) +#define GST_IS_ALPHA_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ALPHA)) + +typedef struct _GstAlpha GstAlpha; +typedef struct _GstAlphaClass GstAlphaClass; + +typedef enum +{ + ALPHA_METHOD_ADD, + ALPHA_METHOD_GREEN, + ALPHA_METHOD_BLUE, +} +GstAlphaMethod; + +#define DEFAULT_METHOD ALPHA_METHOD_ADD +#define DEFAULT_ALPHA 1.0 +#define DEFAULT_TARGET_CR 116 +#define DEFAULT_TARGET_CB 116 + +struct _GstAlpha +{ + GstElement element; + + /* pads */ + GstPad *sinkpad; + GstPad *srcpad; + + /* caps */ + gint in_width, in_height; + gint out_width, out_height; + + gdouble alpha; + + guint target_cr, target_cb; + + GstAlphaMethod method; +}; + +struct _GstAlphaClass +{ + GstElementClass parent_class; +}; + +/* elementfactory 
information */ +static GstElementDetails gst_alpha_details = +GST_ELEMENT_DETAILS ("alpha filter", + "Filter/Effect/Video", + "Adds an alpha channel to video", + "Wim Taymans <wim@fluendo.com>"); + + +/* Alpha signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + ARG_0, + ARG_METHOD, + ARG_ALPHA, + ARG_TARGET_CR, + ARG_TARGET_CB, + /* FILL ME */ +}; + +static GstStaticPadTemplate gst_alpha_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")) + ); + +static GstStaticPadTemplate gst_alpha_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")) + ); + + +static void gst_alpha_base_init (gpointer g_class); +static void gst_alpha_class_init (GstAlphaClass * klass); +static void gst_alpha_init (GstAlpha * alpha); + +static void gst_alpha_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_alpha_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static GstPadLinkReturn +gst_alpha_sink_link (GstPad * pad, const GstCaps * caps); +static void gst_alpha_chain (GstPad * pad, GstData * _data); + +static GstElementStateReturn gst_alpha_change_state (GstElement * element); + + +static GstElementClass *parent_class = NULL; + +#define GST_TYPE_ALPHA_METHOD (gst_alpha_method_get_type()) +static GType +gst_alpha_method_get_type (void) +{ + static GType alpha_method_type = 0; + static GEnumValue alpha_method[] = { + {ALPHA_METHOD_ADD, "0", "Add alpha channel"}, + {ALPHA_METHOD_GREEN, "1", "Chroma Key green"}, + {ALPHA_METHOD_BLUE, "2", "Chroma Key blue"}, + {0, NULL, NULL}, + }; + + if (!alpha_method_type) { + alpha_method_type = g_enum_register_static ("GstAlphaMethod", alpha_method); + } + return alpha_method_type; +} + +/* static guint gst_alpha_signals[LAST_SIGNAL] = { 0 }; */ + +GType +gst_alpha_get_type (void) +{ + 
static GType alpha_type = 0; + + if (!alpha_type) { + static const GTypeInfo alpha_info = { + sizeof (GstAlphaClass), + gst_alpha_base_init, + NULL, + (GClassInitFunc) gst_alpha_class_init, + NULL, + NULL, + sizeof (GstAlpha), + 0, + (GInstanceInitFunc) gst_alpha_init, + }; + + alpha_type = + g_type_register_static (GST_TYPE_ELEMENT, "GstAlpha", &alpha_info, 0); + } + return alpha_type; +} + +static void +gst_alpha_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &gst_alpha_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_alpha_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_alpha_src_template)); +} +static void +gst_alpha_class_init (GstAlphaClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_METHOD, + g_param_spec_enum ("method", "Method", + "How the alpha channels should be created", GST_TYPE_ALPHA_METHOD, + DEFAULT_METHOD, (GParamFlags) G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ALPHA, + g_param_spec_double ("alpha", "Alpha", "The value for the alpha channel", + 0.0, 1.0, DEFAULT_ALPHA, (GParamFlags) G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_TARGET_CR, + g_param_spec_uint ("target_cr", "Target Red", "The Red Chroma target", 0, + 255, 116, (GParamFlags) G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_TARGET_CB, + g_param_spec_uint ("target_cb", "Target Blue", "The Blue Chroma target", + 0, 255, 116, (GParamFlags) G_PARAM_READWRITE)); + + gobject_class->set_property = gst_alpha_set_property; + 
gobject_class->get_property = gst_alpha_get_property; + + gstelement_class->change_state = gst_alpha_change_state; +} + +static void +gst_alpha_init (GstAlpha * alpha) +{ + /* create the sink and src pads */ + alpha->sinkpad = + gst_pad_new_from_template (gst_static_pad_template_get + (&gst_alpha_sink_template), "sink"); + gst_element_add_pad (GST_ELEMENT (alpha), alpha->sinkpad); + gst_pad_set_chain_function (alpha->sinkpad, gst_alpha_chain); + gst_pad_set_link_function (alpha->sinkpad, gst_alpha_sink_link); + + alpha->srcpad = + gst_pad_new_from_template (gst_static_pad_template_get + (&gst_alpha_src_template), "src"); + gst_element_add_pad (GST_ELEMENT (alpha), alpha->srcpad); + + alpha->alpha = DEFAULT_ALPHA; + alpha->method = DEFAULT_METHOD; + alpha->target_cr = DEFAULT_TARGET_CR; + alpha->target_cb = DEFAULT_TARGET_CB; + + GST_FLAG_SET (alpha, GST_ELEMENT_EVENT_AWARE); +} + +/* do we need this function? */ +static void +gst_alpha_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstAlpha *alpha; + + /* it's not null if we got it, but it might not be ours */ + g_return_if_fail (GST_IS_ALPHA (object)); + + alpha = GST_ALPHA (object); + + switch (prop_id) { + case ARG_METHOD: + alpha->method = g_value_get_enum (value); + break; + case ARG_ALPHA: + alpha->alpha = g_value_get_double (value); + break; + case ARG_TARGET_CB: + alpha->target_cb = g_value_get_uint (value); + break; + case ARG_TARGET_CR: + alpha->target_cr = g_value_get_uint (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} +static void +gst_alpha_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstAlpha *alpha; + + /* it's not null if we got it, but it might not be ours */ + g_return_if_fail (GST_IS_ALPHA (object)); + + alpha = GST_ALPHA (object); + + switch (prop_id) { + case ARG_METHOD: + g_value_set_enum (value, alpha->method); + break; + case ARG_ALPHA: + 
g_value_set_double (value, alpha->alpha); + break; + case ARG_TARGET_CR: + g_value_set_uint (value, alpha->target_cr); + break; + case ARG_TARGET_CB: + g_value_set_uint (value, alpha->target_cb); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstPadLinkReturn +gst_alpha_sink_link (GstPad * pad, const GstCaps * caps) +{ + GstAlpha *alpha; + GstStructure *structure; + gboolean ret; + + alpha = GST_ALPHA (gst_pad_get_parent (pad)); + structure = gst_caps_get_structure (caps, 0); + + ret = gst_structure_get_int (structure, "width", &alpha->in_width); + ret &= gst_structure_get_int (structure, "height", &alpha->in_height); + + return GST_PAD_LINK_OK; +} + +/* +static int yuv_colors_Y[] = { 16, 150, 29 }; +static int yuv_colors_U[] = { 128, 46, 255 }; +static int yuv_colors_V[] = { 128, 21, 107 }; +*/ + +static void +gst_alpha_add (guint8 * src, guint8 * dest, gint width, gint height, + gdouble alpha) +{ + gint b_alpha = (gint) (alpha * 255); + guint8 *srcY; + guint8 *srcU; + guint8 *srcV; + gint size; + gint half_width = width / 2; + gint i, j; + + size = width * height; + + srcY = src; + srcU = srcY + size; + srcV = srcU + size / 4; + + for (i = 0; i < height; i++) { + for (j = 0; j < width / 2; j++) { + *dest++ = b_alpha; + *dest++ = *srcY++; + *dest++ = *srcU; + *dest++ = *srcV; + *dest++ = b_alpha; + *dest++ = *srcY++; + *dest++ = *srcU++; + *dest++ = *srcV++; + } + if (i % 2 == 0) { + srcU -= half_width; + srcV -= half_width; + } + } +} + +static void +gst_alpha_chroma_key (gchar * src, gchar * dest, gint width, gint height, + gboolean soft, gint target_u, gint target_v, gfloat edge_factor, + gdouble alpha) +{ + gint b_alpha; + gint f_alpha = (gint) (alpha * 255); + guint8 *srcY1, *srcY2, *srcU, *srcV; + guint8 *dest1, *dest2; + gint i, j; + gint x, z, u, v; + gint size; + + size = width * height; + + srcY1 = src; + srcY2 = src + width; + srcU = srcY1 + size; + srcV = srcU + size / 4; + + dest1 = dest; + 
dest2 = dest + width * 4; + + for (i = 0; i < height / 2; i++) { + for (j = 0; j < width / 2; j++) { + u = *srcU++; + v = *srcV++; + + x = target_u - u; + z = target_v - v; + + // only filter if in top left square + if ((x > 0) && (z > 0)) { + // only calculate lot of stuff if we'll use soft edges + if (soft) { + gint ds = (x > z) ? z : x; + + gfloat df = (gfloat) (ds) / edge_factor; + + if (df > 1.0) + df = 1.0; + + // suppress foreground + if (x > z) { + u += z; + v += z; + } else { + u += x; + v += x; + } + b_alpha = (int) (f_alpha * (1.0 - df)); + } else { + // kill color and alpha + b_alpha = 0; + } + } else { + // do nothing; + b_alpha = f_alpha; + } + + *dest1++ = b_alpha; + *dest1++ = *srcY1++; + *dest1++ = u; + *dest1++ = v; + *dest1++ = b_alpha; + *dest1++ = *srcY1++; + *dest1++ = u; + *dest1++ = v; + *dest2++ = b_alpha; + *dest2++ = *srcY2++; + *dest2++ = u; + *dest2++ = v; + *dest2++ = b_alpha; + *dest2++ = *srcY2++; + *dest2++ = u; + *dest2++ = v; + } + dest1 += width * 4; + dest2 += width * 4; + srcY1 += width; + srcY2 += width; + } +} + +static void +gst_alpha_chain (GstPad * pad, GstData * _data) +{ + GstBuffer *buffer; + GstAlpha *alpha; + GstBuffer *outbuf; + gint new_width, new_height; + + alpha = GST_ALPHA (gst_pad_get_parent (pad)); + + if (GST_IS_EVENT (_data)) { + GstEvent *event = GST_EVENT (_data); + + switch (GST_EVENT_TYPE (event)) { + default: + gst_pad_event_default (pad, event); + break; + } + return; + } + + buffer = GST_BUFFER (_data); + + new_width = alpha->in_width; + new_height = alpha->in_height; + + if (new_width != alpha->out_width || + new_height != alpha->out_height || !GST_PAD_CAPS (alpha->srcpad)) { + GstCaps *newcaps; + + newcaps = gst_caps_copy (gst_pad_get_negotiated_caps (alpha->sinkpad)); + gst_caps_set_simple (newcaps, + "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("AYUV"), + "width", G_TYPE_INT, new_width, "height", G_TYPE_INT, new_height, NULL); + + if (!gst_pad_try_set_caps (alpha->srcpad, newcaps)) { + 
GST_ELEMENT_ERROR (alpha, CORE, NEGOTIATION, (NULL), (NULL)); + return; + } + + alpha->out_width = new_width; + alpha->out_height = new_height; + } + + outbuf = gst_buffer_new_and_alloc (new_width * new_height * 4); + GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer); + GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer); + + switch (alpha->method) { + case ALPHA_METHOD_ADD: + gst_alpha_add (GST_BUFFER_DATA (buffer), + GST_BUFFER_DATA (outbuf), new_width, new_height, alpha->alpha); + break; + case ALPHA_METHOD_GREEN: + gst_alpha_chroma_key (GST_BUFFER_DATA (buffer), + GST_BUFFER_DATA (outbuf), + new_width, new_height, + TRUE, alpha->target_cr, alpha->target_cb, 1.0, alpha->alpha); + break; + case ALPHA_METHOD_BLUE: + gst_alpha_chroma_key (GST_BUFFER_DATA (buffer), + GST_BUFFER_DATA (outbuf), + new_width, new_height, TRUE, 100, 100, 1.0, alpha->alpha); + break; + } + + gst_buffer_unref (buffer); + + gst_pad_push (alpha->srcpad, GST_DATA (outbuf)); +} + +static GstElementStateReturn +gst_alpha_change_state (GstElement * element) +{ + GstAlpha *alpha; + + alpha = GST_ALPHA (element); + + switch (GST_STATE_TRANSITION (element)) { + case GST_STATE_NULL_TO_READY: + break; + case GST_STATE_READY_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_PLAYING: + break; + case GST_STATE_PLAYING_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_READY: + break; + case GST_STATE_READY_TO_NULL: + break; + } + + parent_class->change_state (element); + + return GST_STATE_SUCCESS; +} + +static gboolean +plugin_init (GstPlugin * plugin) +{ + return gst_element_register (plugin, "alpha", GST_RANK_NONE, GST_TYPE_ALPHA); +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "alpha", + "resizes a video by adding borders or cropping", + plugin_init, VERSION, GST_LICENSE, GST_PACKAGE, GST_ORIGIN) diff --git a/gst/multipart/Makefile.am b/gst/multipart/Makefile.am new file mode 100644 index 00000000..45632eff --- /dev/null +++ b/gst/multipart/Makefile.am @@ -0,0 +1,9 
@@ +plugindir = $(libdir)/gstreamer-@GST_MAJORMINOR@ + +plugin_LTLIBRARIES = libmultipart.la + +libmultipart_la_SOURCES = multipart.c multipartdemux.c multipartmux.c +libmultipart_la_CFLAGS = $(GST_CFLAGS) +libmultipart_la_LIBADD = +libmultipart_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) + diff --git a/gst/multipart/multipart.c b/gst/multipart/multipart.c new file mode 100644 index 00000000..802653fb --- /dev/null +++ b/gst/multipart/multipart.c @@ -0,0 +1,44 @@ +/* GStreamer + * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include <gst/gst.h> + +extern gboolean gst_multipart_demux_plugin_init (GstPlugin * plugin); +extern gboolean gst_multipart_mux_plugin_init (GstPlugin * plugin); + +GST_DEBUG_CATEGORY (vorbisdec_debug); + +static gboolean +plugin_init (GstPlugin * plugin) +{ + gst_multipart_demux_plugin_init (plugin); + gst_multipart_mux_plugin_init (plugin); + + return TRUE; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "multipart", + "multipart stream manipulation", + plugin_init, VERSION, GST_LICENSE, GST_PACKAGE, GST_ORIGIN) diff --git a/gst/multipart/multipartdemux.c b/gst/multipart/multipartdemux.c new file mode 100644 index 00000000..6406df27 --- /dev/null +++ b/gst/multipart/multipartdemux.c @@ -0,0 +1,374 @@ +/* GStreamer + * Copyright (C) 2004 Wim Taymans <wim@fluendo.com> + * + * gstmultipartdemux.c: multipart stream demuxer + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include <gst/gst.h> + +#include <string.h> + +GST_DEBUG_CATEGORY_STATIC (gst_multipart_demux_debug); +#define GST_CAT_DEFAULT gst_multipart_demux_debug + +#define GST_TYPE_MULTIPART_DEMUX (gst_multipart_demux_get_type()) +#define GST_MULTIPART_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTIPART_DEMUX, GstMultipartDemux)) +#define GST_MULTIPART_DEMUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTIPART_DEMUX, GstMultipartDemux)) +#define GST_IS_MULTIPART_DEMUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTIPART_DEMUX)) +#define GST_IS_MULTIPART_DEMUX_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTIPART_DEMUX)) + +#define MAX_LINE_LEN 500 + +typedef struct _GstMultipartDemux GstMultipartDemux; +typedef struct _GstMultipartDemuxClass GstMultipartDemuxClass; + +static gchar *toFind = "--ThisRandomString\nContent-type: "; //image/jpeg\n\n"; +static gint toFindLen; + +/* all information needed for one multipart stream */ +typedef struct +{ + GstPad *pad; /* reference for this pad is held by element we belong to */ + + gchar *mime; + + guint64 offset; /* end offset of last buffer */ + guint64 known_offset; /* last known offset */ + + guint flags; +} +GstMultipartPad; + +struct _GstMultipartDemux +{ + GstElement element; + + /* pad */ + GstPad *sinkpad; + + GSList *srcpads; + gint numpads; + + gchar *parsing_mime; + gchar *buffer; + gint maxlen; + gint bufsize; + gint scanpos; + gint lastpos; +}; + +struct _GstMultipartDemuxClass +{ + GstElementClass parent_class; +}; + +/* elementfactory information */ +static GstElementDetails gst_multipart_demux_details = +GST_ELEMENT_DETAILS ("multipart demuxer", + "Codec/Demuxer", + "demux multipart streams", + "Wim Taymans <wim@fluendo.com>"); + + +/* signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + ARG_0, + /* FILL ME */ +}; + +static GstStaticPadTemplate multipart_demux_src_template_factory = 
+GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_SOMETIMES, + GST_STATIC_CAPS_ANY); + +static GstStaticPadTemplate multipart_demux_sink_template_factory = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("multipart/x-mixed-replace") + ); + + +static void gst_multipart_demux_finalize (GObject * object); + +static void gst_multipart_demux_chain (GstPad * pad, GstData * buffer); + +static GstElementStateReturn gst_multipart_demux_change_state (GstElement * + element); + + +GST_BOILERPLATE (GstMultipartDemux, gst_multipart_demux, GstElement, + GST_TYPE_ELEMENT) + + static void gst_multipart_demux_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &gst_multipart_demux_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&multipart_demux_sink_template_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&multipart_demux_src_template_factory)); + + toFindLen = strlen (toFind); +} + +static void +gst_multipart_demux_class_init (GstMultipartDemuxClass * klass) +{ + GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass); + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + + gstelement_class->change_state = gst_multipart_demux_change_state; + + gobject_class->finalize = gst_multipart_demux_finalize; +} + +static void +gst_multipart_demux_init (GstMultipartDemux * multipart) +{ + /* create the sink pad */ + multipart->sinkpad = + gst_pad_new_from_template (gst_static_pad_template_get + (&multipart_demux_sink_template_factory), "sink"); + gst_element_add_pad (GST_ELEMENT (multipart), multipart->sinkpad); + gst_pad_set_chain_function (multipart->sinkpad, + GST_DEBUG_FUNCPTR (gst_multipart_demux_chain)); + + GST_FLAG_SET (multipart, GST_ELEMENT_EVENT_AWARE); + + multipart->maxlen = 4096; + multipart->buffer = g_malloc (multipart->maxlen); + 
multipart->parsing_mime = NULL; + multipart->numpads = 0; + multipart->scanpos = 0; + multipart->lastpos = 0; +} + +static void +gst_multipart_demux_finalize (GObject * object) +{ + GstMultipartDemux *multipart; + + multipart = GST_MULTIPART_DEMUX (object); +} + +static void +gst_multipart_demux_handle_event (GstPad * pad, GstEvent * event) +{ + //GstMultipartDemux *multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_DISCONTINUOUS: + case GST_EVENT_EOS: + default: + gst_pad_event_default (pad, event); + break; + } + return; +} + +static GstMultipartPad * +gst_multipart_find_pad_by_mime (GstMultipartDemux * demux, gchar * mime) +{ + GSList *walk; + + walk = demux->srcpads; + while (walk) { + GstMultipartPad *pad = (GstMultipartPad *) walk->data; + + if (!strcmp (pad->mime, mime)) { + return pad; + } + + walk = walk->next; + } + // pad not found, create it + { + GstPad *pad; + GstMultipartPad *mppad; + gchar *name; + GstCaps *caps; + + mppad = g_new0 (GstMultipartPad, 1); + + name = g_strdup_printf ("src_%d", demux->numpads); + pad = gst_pad_new_from_template (gst_static_pad_template_get + (&multipart_demux_src_template_factory), name); + g_free (name); + caps = gst_caps_from_string (mime); + gst_pad_use_explicit_caps (pad); + gst_pad_set_explicit_caps (pad, caps); + + mppad->pad = pad; + mppad->mime = g_strdup (mime); + + demux->srcpads = g_slist_prepend (demux->srcpads, mppad); + demux->numpads++; + + gst_element_add_pad (GST_ELEMENT (demux), pad); + + return mppad; + } +} + +static void +gst_multipart_demux_chain (GstPad * pad, GstData * buffer) +{ + GstMultipartDemux *multipart; + gint size; + gchar *data; + gint matchpos; + + /* handle events */ + if (GST_IS_EVENT (buffer)) { + gst_multipart_demux_handle_event (pad, GST_EVENT (buffer)); + return; + } + + multipart = GST_MULTIPART_DEMUX (gst_pad_get_parent (pad)); + + data = GST_BUFFER_DATA (buffer); + size = GST_BUFFER_SIZE (buffer); + + // first 
make sure our buffer is long enough + if (multipart->bufsize + size > multipart->maxlen) { + gint newsize = (multipart->bufsize + size) * 2; + + multipart->buffer = g_realloc (multipart->buffer, newsize); + multipart->maxlen = newsize; + } + // copy bytes into the buffer + memcpy (multipart->buffer + multipart->bufsize, data, size); + multipart->bufsize += size; + + // find \n + while (multipart->scanpos < multipart->bufsize) { + if (multipart->buffer[multipart->scanpos] == '\n') { + + } + multipart->scanpos++; + } + + // then scan for the boundary + for (matchpos = 0; + multipart->scanpos + toFindLen + MAX_LINE_LEN - matchpos < + multipart->bufsize; multipart->scanpos++) { + if (multipart->buffer[multipart->scanpos] == toFind[matchpos]) { + matchpos++; + if (matchpos == toFindLen) { + int datalen; + int i, start; + gchar *mime_type; + + multipart->scanpos++; + + start = multipart->scanpos; + // find \n + for (i = 0; i < MAX_LINE_LEN; i++) { + if (multipart->buffer[multipart->scanpos] == '\n') + break; + multipart->scanpos++; + matchpos++; + } + mime_type = + g_strndup (multipart->buffer + start, multipart->scanpos - start); + multipart->scanpos += 2; + matchpos += 3; + + datalen = multipart->scanpos - matchpos; + if (datalen > 0 && multipart->parsing_mime) { + GstBuffer *outbuf; + GstMultipartPad *srcpad; + + srcpad = + gst_multipart_find_pad_by_mime (multipart, + multipart->parsing_mime); + if (srcpad != NULL) { + outbuf = gst_buffer_new_and_alloc (datalen); + + memcpy (GST_BUFFER_DATA (outbuf), multipart->buffer, datalen); + GST_BUFFER_TIMESTAMP (outbuf) = 0; + gst_pad_push (srcpad->pad, GST_DATA (outbuf)); + } + } + // move rest downward + multipart->bufsize -= multipart->scanpos; + memcpy (multipart->buffer, multipart->buffer + multipart->scanpos, + multipart->bufsize); + + multipart->parsing_mime = mime_type; + multipart->scanpos = 0; + } + } else { + matchpos = 0; + } + } + + gst_buffer_unref (buffer); +} + +static GstElementStateReturn 
+gst_multipart_demux_change_state (GstElement * element) +{ + GstMultipartDemux *multipart; + + multipart = GST_MULTIPART_DEMUX (element); + + switch (GST_STATE_TRANSITION (element)) { + case GST_STATE_NULL_TO_READY: + break; + case GST_STATE_READY_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_PLAYING: + break; + case GST_STATE_PLAYING_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_READY: + break; + case GST_STATE_READY_TO_NULL: + break; + default: + g_assert_not_reached (); + break; + } + + return parent_class->change_state (element); +} + +gboolean +gst_multipart_demux_plugin_init (GstPlugin * plugin) +{ + GST_DEBUG_CATEGORY_INIT (gst_multipart_demux_debug, + "multipartdemux", 0, "multipart demuxer"); + + return gst_element_register (plugin, "multipartdemux", GST_RANK_PRIMARY, + GST_TYPE_MULTIPART_DEMUX); +} diff --git a/gst/multipart/multipartmux.c b/gst/multipart/multipartmux.c new file mode 100644 index 00000000..d4d96bc6 --- /dev/null +++ b/gst/multipart/multipartmux.c @@ -0,0 +1,593 @@ +/* multipart muxer plugin for GStreamer + * Copyright (C) 2004 Wim Taymans <wim@fluendo.com> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include <gst/gst.h> +#include <string.h> + +GST_DEBUG_CATEGORY_STATIC (gst_multipart_mux_debug); +#define GST_CAT_DEFAULT gst_multipart_mux_debug + +#define GST_TYPE_MULTIPART_MUX (gst_multipart_mux_get_type()) +#define GST_MULTIPART_MUX(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MULTIPART_MUX, GstMultipartMux)) +#define GST_MULTIPART_MUX_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MULTIPART_MUX, GstMultipartMux)) +#define GST_IS_MULTIPART_MUX(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MULTIPART_MUX)) +#define GST_IS_MULTIPART_MUX_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MULTIPART_MUX)) + +typedef struct _GstMultipartMux GstMultipartMux; +typedef struct _GstMultipartMuxClass GstMultipartMuxClass; + +/* all information needed for one multipart stream */ +typedef struct +{ + GstPad *pad; /* reference for this pad is held by element we belong to */ + + GstBuffer *buffer; /* the queued buffer for this pad */ + + gboolean eos; + const gchar *mimetype; + + guint state; /* state of the pad */ +} +GstMultipartPad; + +struct _GstMultipartMux +{ + GstElement element; + + /* pad */ + GstPad *srcpad; + + /* sinkpads, a GSList of GstMultipartPads */ + GSList *sinkpads; + gint numpads; + + /* the pad we are currently pulling from to fill a page */ + GstMultipartPad *pulling; + + /* next timestamp for the page */ + GstClockTime next_ts; + + /* offset in stream */ + guint64 offset; +}; + +typedef enum +{ + GST_MULTIPART_FLAG_BOS = GST_ELEMENT_FLAG_LAST, + GST_MULTIPART_FLAG_EOS +} +GstMultipartFlag; + +struct _GstMultipartMuxClass +{ + GstElementClass parent_class; +}; + +/* elementfactory information */ +static GstElementDetails gst_multipart_mux_details = +GST_ELEMENT_DETAILS ("multipart muxer", + "Codec/Muxer", + "mux multipart streams", + "Wim Taymans <wim@fluendo.com>"); + +/* MultipartMux signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + ARG_0, +}; + 
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("multipart/x-mixed-replace") + ); + +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d", + GST_PAD_SINK, + GST_PAD_REQUEST, + GST_STATIC_CAPS_ANY /* we can take anything, really */ + ); + +static void gst_multipart_mux_base_init (gpointer g_class); +static void gst_multipart_mux_class_init (GstMultipartMuxClass * klass); +static void gst_multipart_mux_init (GstMultipartMux * multipart_mux); + +static void gst_multipart_mux_loop (GstElement * element); +static gboolean gst_multipart_mux_handle_src_event (GstPad * pad, + GstEvent * event); +static GstPad *gst_multipart_mux_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * name); +static void gst_multipart_mux_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_multipart_mux_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static GstElementStateReturn gst_multipart_mux_change_state (GstElement * + element); + +static GstElementClass *parent_class = NULL; + +/*static guint gst_multipart_mux_signals[LAST_SIGNAL] = { 0 }; */ + +GType +gst_multipart_mux_get_type (void) +{ + static GType multipart_mux_type = 0; + + if (!multipart_mux_type) { + static const GTypeInfo multipart_mux_info = { + sizeof (GstMultipartMuxClass), + gst_multipart_mux_base_init, + NULL, + (GClassInitFunc) gst_multipart_mux_class_init, + NULL, + NULL, + sizeof (GstMultipartMux), + 0, + (GInstanceInitFunc) gst_multipart_mux_init, + }; + + multipart_mux_type = + g_type_register_static (GST_TYPE_ELEMENT, "GstMultipartMux", + &multipart_mux_info, 0); + } + return multipart_mux_type; +} + +static void +gst_multipart_mux_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + 
gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); + + gst_element_class_set_details (element_class, &gst_multipart_mux_details); +} + +static void +gst_multipart_mux_class_init (GstMultipartMuxClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); + + gstelement_class->request_new_pad = gst_multipart_mux_request_new_pad; + + gstelement_class->change_state = gst_multipart_mux_change_state; + + gstelement_class->get_property = gst_multipart_mux_get_property; + gstelement_class->set_property = gst_multipart_mux_set_property; +} + +static const GstEventMask * +gst_multipart_mux_get_sink_event_masks (GstPad * pad) +{ + static const GstEventMask gst_multipart_mux_sink_event_masks[] = { + {GST_EVENT_EOS, 0}, + {0,} + }; + + return gst_multipart_mux_sink_event_masks; +} + +static void +gst_multipart_mux_init (GstMultipartMux * multipart_mux) +{ + GstElementClass *klass = GST_ELEMENT_GET_CLASS (multipart_mux); + + multipart_mux->srcpad = + gst_pad_new_from_template (gst_element_class_get_pad_template (klass, + "src"), "src"); + gst_pad_set_event_function (multipart_mux->srcpad, + gst_multipart_mux_handle_src_event); + gst_element_add_pad (GST_ELEMENT (multipart_mux), multipart_mux->srcpad); + + GST_FLAG_SET (GST_ELEMENT (multipart_mux), GST_ELEMENT_EVENT_AWARE); + GST_FLAG_SET (GST_ELEMENT (multipart_mux), GST_MULTIPART_FLAG_BOS); + + multipart_mux->sinkpads = NULL; + multipart_mux->pulling = NULL; + + gst_element_set_loop_function (GST_ELEMENT (multipart_mux), + gst_multipart_mux_loop); +} + +static GstPadLinkReturn +gst_multipart_mux_sinkconnect (GstPad * pad, const GstCaps * vscaps) +{ + GstMultipartMux *multipart_mux; + GstMultipartPad *mppad; + GstStructure *structure; + + multipart_mux = 
GST_MULTIPART_MUX (gst_pad_get_parent (pad)); + + mppad = (GstMultipartPad *) gst_pad_get_element_private (pad); + + GST_DEBUG ("multipart_mux: sinkconnect triggered on %s", + gst_pad_get_name (pad)); + + structure = gst_caps_get_structure (vscaps, 0); + mppad->mimetype = gst_structure_get_name (structure); + + return GST_PAD_LINK_OK; +} + +static void +gst_multipart_mux_pad_link (GstPad * pad, GstPad * peer, gpointer data) +{ + //GstMultipartMux *multipart_mux = GST_MULTIPART_MUX (data); + const gchar *padname = gst_pad_get_name (pad); + + GST_DEBUG ("pad '%s' connected", padname); +} + +static void +gst_multipart_mux_pad_unlink (GstPad * pad, GstPad * peer, gpointer data) +{ + //GstMultipartMux *multipart_mux = GST_MULTIPART_MUX (data); + const gchar *padname = gst_pad_get_name (pad); + + GST_DEBUG ("pad '%s' unlinked", padname); +} + +static GstPad * +gst_multipart_mux_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * req_name) +{ + GstMultipartMux *multipart_mux; + GstPad *newpad; + GstElementClass *klass = GST_ELEMENT_GET_CLASS (element); + + g_return_val_if_fail (templ != NULL, NULL); + + if (templ->direction != GST_PAD_SINK) { + g_warning ("multipart_mux: request pad that is not a SINK pad\n"); + return NULL; + } + + g_return_val_if_fail (GST_IS_MULTIPART_MUX (element), NULL); + + multipart_mux = GST_MULTIPART_MUX (element); + + if (templ == gst_element_class_get_pad_template (klass, "sink_%d")) { + gchar *name; + + /* create new pad with the name */ + name = g_strdup_printf ("sink_%02d", multipart_mux->numpads); + newpad = gst_pad_new_from_template (templ, name); + g_free (name); + + /* construct our own wrapper data structure for the pad to + * keep track of its status */ + { + GstMultipartPad *multipartpad = g_new0 (GstMultipartPad, 1); + + multipartpad->pad = newpad; + multipartpad->eos = FALSE; + + /* save a pointer to our data in the pad */ + gst_pad_set_element_private (newpad, multipartpad); + /* store our data for the 
pad */ + multipart_mux->sinkpads = + g_slist_prepend (multipart_mux->sinkpads, multipartpad); + multipart_mux->numpads++; + } + } else { + g_warning ("multipart_mux: this is not our template!\n"); + return NULL; + } + + g_signal_connect (newpad, "linked", + G_CALLBACK (gst_multipart_mux_pad_link), (gpointer) multipart_mux); + g_signal_connect (newpad, "unlinked", + G_CALLBACK (gst_multipart_mux_pad_unlink), (gpointer) multipart_mux); + + /* setup some pad functions */ + gst_pad_set_link_function (newpad, gst_multipart_mux_sinkconnect); + gst_pad_set_event_mask_function (newpad, + gst_multipart_mux_get_sink_event_masks); + /* dd the pad to the element */ + gst_element_add_pad (element, newpad); + + return newpad; +} + +/* handle events */ +static gboolean +gst_multipart_mux_handle_src_event (GstPad * pad, GstEvent * event) +{ + GstMultipartMux *multipart_mux; + GstEventType type; + + multipart_mux = GST_MULTIPART_MUX (gst_pad_get_parent (pad)); + + type = event ? GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN; + + switch (type) { + case GST_EVENT_SEEK: + /* disable seeking for now */ + return FALSE; + default: + break; + } + + return gst_pad_event_default (pad, event); +} + +static GstBuffer * +gst_multipart_mux_next_buffer (GstMultipartPad * pad) +{ + GstData *data = NULL; + + while (data == NULL) { + GST_LOG ("muxer: pulling %s:%s\n", GST_DEBUG_PAD_NAME (pad->pad)); + data = gst_pad_pull (pad->pad); + /* if it's an event, handle it */ + if (GST_IS_EVENT (data)) { + GstEventType type; + GstMultipartMux *multipart_mux; + GstEvent *event = GST_EVENT (data); + + multipart_mux = GST_MULTIPART_MUX (gst_pad_get_parent (pad->pad)); + type = event ? 
GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN; + + switch (type) { + case GST_EVENT_EOS: + return NULL; + default: + gst_pad_event_default (pad->pad, event); + break; + } + data = NULL; + } + } + return GST_BUFFER (data); +} + +/* + * Given two pads, compare the buffers queued on it and return 0 if they have + * an equal priority, 1 if the new pad is better, -1 if the old pad is better + */ +static gint +gst_multipart_mux_compare_pads (GstMultipartMux * multipart_mux, + GstMultipartPad * old, GstMultipartPad * new) +{ + guint64 oldtime, newtime; + + /* if the old pad doesn't contain anything or is even NULL, return + * the new pad as best candidate and vice versa */ + if (old == NULL || old->buffer == NULL) + return 1; + if (new == NULL || new->buffer == NULL) + return -1; + + /* no timestamp on old buffer, it must go first */ + oldtime = GST_BUFFER_TIMESTAMP (old->buffer); + if (oldtime == GST_CLOCK_TIME_NONE) + return -1; + + /* no timestamp on new buffer, it must go first */ + newtime = GST_BUFFER_TIMESTAMP (new->buffer); + if (newtime == GST_CLOCK_TIME_NONE) + return 1; + + /* old buffer has higher timestamp, new one should go first */ + if (newtime < oldtime) + return 1; + /* new buffer has higher timestamp, old one should go first */ + else if (newtime > oldtime) + return -1; + + /* same priority if all of the above failed */ + return 0; +} + +/* make sure a buffer is queued on all pads, returns a pointer to an multipartpad + * that holds the best buffer or NULL when no pad was usable */ +static GstMultipartPad * +gst_multipart_mux_queue_pads (GstMultipartMux * multipart_mux) +{ + GstMultipartPad *bestpad = NULL; + GSList *walk; + + /* try to make sure we have a buffer from each usable pad first */ + walk = multipart_mux->sinkpads; + while (walk) { + GstMultipartPad *pad = (GstMultipartPad *) walk->data; + + walk = walk->next; + + /* try to get a new buffer for this pad if needed and possible */ + if (pad->buffer == NULL && GST_PAD_IS_USABLE (pad->pad)) { + 
pad->buffer = gst_multipart_mux_next_buffer (pad); + /* no next buffer, try another pad */ + if (pad->buffer == NULL) + continue; + } + + /* skip unusable pads */ + if (!GST_PAD_IS_USABLE (pad->pad)) + continue; + + /* we should have a buffer now, see if it is the best pad to + * pull on */ + if (pad->buffer != NULL) { + if (gst_multipart_mux_compare_pads (multipart_mux, bestpad, pad) > 0) { + bestpad = pad; + } + } + } + return bestpad; +} + +/* basic idea: + * + * 1) find a pad to pull on, this is done by pulling on all pads and + * looking at the buffers to decide which one should be muxed first. + * 2) push buffer on best pad, go to 1 + */ +static void +gst_multipart_mux_loop (GstElement * element) +{ + GstMultipartMux *mux; + + mux = GST_MULTIPART_MUX (element); + + /* if we don't know which pad to pull on, find one */ + if (mux->pulling == NULL) { + mux->pulling = gst_multipart_mux_queue_pads (mux); + /* remember timestamp of first buffer for this new pad */ + if (mux->pulling != NULL) { + mux->next_ts = GST_BUFFER_TIMESTAMP (mux->pulling->buffer); + } else { + /* no pad to pull on, send EOS */ + if (GST_PAD_IS_USABLE (mux->srcpad)) + gst_pad_push (mux->srcpad, GST_DATA (gst_event_new (GST_EVENT_EOS))); + gst_element_set_eos (element); + return; + } + } + + /* we are pulling from a pad, continue to do so until a page + * has been filled and pushed */ + if (mux->pulling != NULL) { + GstBuffer *buf, *tmpbuf; + GstMultipartPad *pad = mux->pulling; + GstBuffer *newbuf; + gchar *header; + gint headerlen; + + /* now see if we have a buffer */ + buf = pad->buffer; + if (buf == NULL) { + /* no buffer, get one */ + buf = gst_multipart_mux_next_buffer (pad); + /* data exhausted on this pad (EOS) */ + if (buf == NULL) { + /* stop pulling from the pad */ + mux->pulling = NULL; + return; + } + } + + /* read ahead one more buffer to find EOS */ + tmpbuf = gst_multipart_mux_next_buffer (pad); + /* data exhausted on this pad */ + if (tmpbuf == NULL) { + /* stop pulling from 
the pad */ + mux->pulling = NULL; + } + + header = g_strdup_printf ("\n--ThisRandomString\nContent-type: %s\n\n", + pad->mimetype); + headerlen = strlen (header); + newbuf = + gst_pad_alloc_buffer (mux->srcpad, GST_BUFFER_OFFSET_NONE, headerlen); + GST_BUFFER_DATA (newbuf) = header; + GST_BUFFER_SIZE (newbuf) = headerlen; + GST_BUFFER_TIMESTAMP (newbuf) = GST_BUFFER_TIMESTAMP (buf); + gst_pad_push (mux->srcpad, GST_DATA (newbuf)); + gst_pad_push (mux->srcpad, GST_DATA (buf)); + + /* store new readahead buffer */ + pad->buffer = tmpbuf; + + /* we're done pulling on this pad, make sure to choose a new + * pad for pulling in the next iteration */ + mux->pulling = NULL; + } +} + +static void +gst_multipart_mux_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + switch (prop_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_multipart_mux_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + switch (prop_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstElementStateReturn +gst_multipart_mux_change_state (GstElement * element) +{ + GstMultipartMux *multipart_mux; + gint transition = GST_STATE_TRANSITION (element); + + g_return_val_if_fail (GST_IS_MULTIPART_MUX (element), GST_STATE_FAILURE); + + multipart_mux = GST_MULTIPART_MUX (element); + + switch (transition) { + case GST_STATE_NULL_TO_READY: + case GST_STATE_READY_TO_PAUSED: + multipart_mux->next_ts = 0; + multipart_mux->offset = 0; + multipart_mux->pulling = NULL; + break; + case GST_STATE_PAUSED_TO_PLAYING: + case GST_STATE_PLAYING_TO_PAUSED: + case GST_STATE_PAUSED_TO_READY: + case GST_STATE_READY_TO_NULL: + break; + } + + if (GST_ELEMENT_CLASS (parent_class)->change_state) + return GST_ELEMENT_CLASS (parent_class)->change_state (element); + + return GST_STATE_SUCCESS; +} + +gboolean +gst_multipart_mux_plugin_init 
(GstPlugin * plugin) +{ + GST_DEBUG_CATEGORY_INIT (gst_multipart_mux_debug, "multipartmux", 0, + "multipart muxer"); + + return gst_element_register (plugin, "multipartmux", GST_RANK_PRIMARY, + GST_TYPE_MULTIPART_MUX); +} diff --git a/gst/videobox/Makefile.am b/gst/videobox/Makefile.am new file mode 100644 index 00000000..37da9aae --- /dev/null +++ b/gst/videobox/Makefile.am @@ -0,0 +1,9 @@ + +plugin_LTLIBRARIES = libgstvideobox.la + +libgstvideobox_la_SOURCES = gstvideobox.c +libgstvideobox_la_CFLAGS = $(GST_CFLAGS) +libgstvideobox_la_LIBADD = +libgstvideobox_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) + +noinst_HEADERS = diff --git a/gst/videobox/README b/gst/videobox/README new file mode 100644 index 00000000..e71da590 --- /dev/null +++ b/gst/videobox/README @@ -0,0 +1,21 @@ +Videobox +-------- + +This plugin crops or enlarges the image. It takes 4 values as input, a +top, bottom, left and right offset. Positive values will crop that much +pixels from the respective border of the image, negative values will add +that much pixels. When pixels are added, you can specify their color. +Some predefined colors are usable with an enum property. + +The plugin is alpha channel aware and will try to negotiate with a format +that supports alpha channels first. When alpha channel is active two +other properties, alpha and border_alpha can be used to set the alpha +values of the inner picture and the border respectively. an alpha value of +0.0 means total transparency, 1.0 is opaque. + +The videobox plugin has many uses such as doing a mosaic of pictures, +letterboxing video, cutting out pieces of video, picture in picture, etc.. 
+ +TODO + +- add enum to specify common aspect ratios/sizes and add borders/crop diff --git a/gst/videobox/gstvideobox.c b/gst/videobox/gstvideobox.c new file mode 100644 index 00000000..c1d9c3ba --- /dev/null +++ b/gst/videobox/gstvideobox.c @@ -0,0 +1,734 @@ +/* GStreamer + * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include <gst/gst.h> +#include <gst/video/video.h> + +#include <string.h> + +#define GST_TYPE_VIDEO_BOX \ + (gst_video_box_get_type()) +#define GST_VIDEO_BOX(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_BOX,GstVideoBox)) +#define GST_VIDEO_BOX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_BOX,GstVideoBoxClass)) +#define GST_IS_VIDEO_BOX(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_BOX)) +#define GST_IS_VIDEO_BOX_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_BOX)) + +typedef struct _GstVideoBox GstVideoBox; +typedef struct _GstVideoBoxClass GstVideoBoxClass; + +typedef enum +{ + VIDEO_BOX_FILL_BLACK, + VIDEO_BOX_FILL_GREEN, + VIDEO_BOX_FILL_BLUE, +} +GstVideoBoxFill; + +struct _GstVideoBox +{ + GstElement element; + + /* pads */ + GstPad *sinkpad; + GstPad *srcpad; + + /* caps */ + gint in_width, in_height; + gint out_width, out_height; + + gint box_left, box_right, box_top, box_bottom; + + gint border_left, border_right, border_top, border_bottom; + gint crop_left, crop_right, crop_top, crop_bottom; + + gboolean use_alpha; + gdouble alpha; + gdouble border_alpha; + + GstVideoBoxFill fill_type; +}; + +struct _GstVideoBoxClass +{ + GstElementClass parent_class; +}; + +/* elementfactory information */ +static GstElementDetails gst_video_box_details = +GST_ELEMENT_DETAILS ("video box filter", + "Filter/Effect/Video", + "Resizes a video by adding borders or cropping", + "Wim Taymans <wim@fluendo.com>"); + + +/* VideoBox signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +#define DEFAULT_LEFT 0 +#define DEFAULT_RIGHT 0 +#define DEFAULT_TOP 0 +#define DEFAULT_BOTTOM 0 +#define DEFAULT_FILL_TYPE VIDEO_BOX_FILL_BLACK +#define DEFAULT_ALPHA 1.0 +#define DEFAULT_BORDER_ALPHA 1.0 + +enum +{ + ARG_0, + ARG_LEFT, + ARG_RIGHT, + ARG_TOP, + ARG_BOTTOM, + ARG_FILL_TYPE, + ARG_ALPHA, + ARG_BORDER_ALPHA, + /* FILL ME */ +}; + +static 
GstStaticPadTemplate gst_video_box_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, AYUV }")) + ); + +static GstStaticPadTemplate gst_video_box_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")) + ); + + +static void gst_video_box_base_init (gpointer g_class); +static void gst_video_box_class_init (GstVideoBoxClass * klass); +static void gst_video_box_init (GstVideoBox * video_box); + +static void gst_video_box_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_video_box_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static GstPadLinkReturn +gst_video_box_sink_link (GstPad * pad, const GstCaps * caps); +static void gst_video_box_chain (GstPad * pad, GstData * _data); + +static GstElementStateReturn gst_video_box_change_state (GstElement * element); + + +static GstElementClass *parent_class = NULL; + +#define GST_TYPE_VIDEO_BOX_FILL (gst_video_box_fill_get_type()) +static GType +gst_video_box_fill_get_type (void) +{ + static GType video_box_fill_type = 0; + static GEnumValue video_box_fill[] = { + {VIDEO_BOX_FILL_BLACK, "0", "Black"}, + {VIDEO_BOX_FILL_GREEN, "1", "Colorkey green"}, + {VIDEO_BOX_FILL_BLUE, "2", "Colorkey blue"}, + {0, NULL, NULL}, + }; + + if (!video_box_fill_type) { + video_box_fill_type = + g_enum_register_static ("GstVideoBoxFill", video_box_fill); + } + return video_box_fill_type; +} + +/* static guint gst_video_box_signals[LAST_SIGNAL] = { 0 }; */ + +GType +gst_video_box_get_type (void) +{ + static GType video_box_type = 0; + + if (!video_box_type) { + static const GTypeInfo video_box_info = { + sizeof (GstVideoBoxClass), + gst_video_box_base_init, + NULL, + (GClassInitFunc) gst_video_box_class_init, + NULL, + NULL, + sizeof (GstVideoBox), + 0, + (GInstanceInitFunc) gst_video_box_init, + 
}; + + video_box_type = + g_type_register_static (GST_TYPE_ELEMENT, "GstVideoBox", + &video_box_info, 0); + } + return video_box_type; +} + +static void +gst_video_box_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_set_details (element_class, &gst_video_box_details); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_video_box_sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_video_box_src_template)); +} +static void +gst_video_box_class_init (GstVideoBoxClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FILL_TYPE, + g_param_spec_enum ("fill", "Fill", "How to fill the borders", + GST_TYPE_VIDEO_BOX_FILL, DEFAULT_FILL_TYPE, + (GParamFlags) G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LEFT, + g_param_spec_int ("left", "Left", "Pixels to box at left", + G_MININT, G_MAXINT, DEFAULT_LEFT, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_RIGHT, + g_param_spec_int ("right", "Right", "Pixels to box at right", + G_MININT, G_MAXINT, DEFAULT_RIGHT, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_TOP, + g_param_spec_int ("top", "Top", "Pixels to box at top", + G_MININT, G_MAXINT, DEFAULT_TOP, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BOTTOM, + g_param_spec_int ("bottom", "Bottom", "Pixels to box at bottom", + G_MININT, G_MAXINT, DEFAULT_BOTTOM, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ALPHA, + g_param_spec_double ("alpha", "Alpha", "Alpha value picture", + 0.0, 1.0, 
DEFAULT_ALPHA, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BORDER_ALPHA, + g_param_spec_double ("border_alpha", "Border Alpha", + "Alpha value of the border", 0.0, 1.0, DEFAULT_BORDER_ALPHA, + G_PARAM_READWRITE)); + + gobject_class->set_property = gst_video_box_set_property; + gobject_class->get_property = gst_video_box_get_property; + + gstelement_class->change_state = gst_video_box_change_state; +} + +static void +gst_video_box_init (GstVideoBox * video_box) +{ + /* create the sink and src pads */ + video_box->sinkpad = + gst_pad_new_from_template (gst_static_pad_template_get + (&gst_video_box_sink_template), "sink"); + gst_element_add_pad (GST_ELEMENT (video_box), video_box->sinkpad); + gst_pad_set_chain_function (video_box->sinkpad, gst_video_box_chain); + gst_pad_set_link_function (video_box->sinkpad, gst_video_box_sink_link); + + video_box->srcpad = + gst_pad_new_from_template (gst_static_pad_template_get + (&gst_video_box_src_template), "src"); + gst_element_add_pad (GST_ELEMENT (video_box), video_box->srcpad); + + video_box->box_right = DEFAULT_RIGHT; + video_box->box_left = DEFAULT_LEFT; + video_box->box_top = DEFAULT_TOP; + video_box->box_bottom = DEFAULT_BOTTOM; + video_box->fill_type = DEFAULT_FILL_TYPE; + video_box->alpha = DEFAULT_ALPHA; + video_box->border_alpha = DEFAULT_BORDER_ALPHA; + + GST_FLAG_SET (video_box, GST_ELEMENT_EVENT_AWARE); +} + +/* do we need this function? 
*/ +static void +gst_video_box_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstVideoBox *video_box; + + /* it's not null if we got it, but it might not be ours */ + g_return_if_fail (GST_IS_VIDEO_BOX (object)); + + video_box = GST_VIDEO_BOX (object); + + switch (prop_id) { + case ARG_LEFT: + video_box->box_left = g_value_get_int (value); + if (video_box->box_left < 0) { + video_box->border_left = -video_box->box_left; + video_box->crop_left = 0; + } else { + video_box->border_left = 0; + video_box->crop_left = video_box->box_left; + } + break; + case ARG_RIGHT: + video_box->box_right = g_value_get_int (value); + if (video_box->box_right < 0) { + video_box->border_right = -video_box->box_right; + video_box->crop_right = 0; + } else { + video_box->border_right = 0; + video_box->crop_right = video_box->box_right; + } + break; + case ARG_TOP: + video_box->box_top = g_value_get_int (value); + if (video_box->box_top < 0) { + video_box->border_top = -video_box->box_top; + video_box->crop_top = 0; + } else { + video_box->border_top = 0; + video_box->crop_top = video_box->box_top; + } + break; + case ARG_BOTTOM: + video_box->box_bottom = g_value_get_int (value); + if (video_box->box_bottom < 0) { + video_box->border_bottom = -video_box->box_bottom; + video_box->crop_bottom = 0; + } else { + video_box->border_bottom = 0; + video_box->crop_bottom = video_box->box_bottom; + } + break; + case ARG_FILL_TYPE: + video_box->fill_type = g_value_get_enum (value); + break; + case ARG_ALPHA: + video_box->alpha = g_value_get_double (value); + break; + case ARG_BORDER_ALPHA: + video_box->border_alpha = g_value_get_double (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} +static void +gst_video_box_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstVideoBox *video_box; + + /* it's not null if we got it, but it might not be ours */ + 
g_return_if_fail (GST_IS_VIDEO_BOX (object)); + + video_box = GST_VIDEO_BOX (object); + + switch (prop_id) { + case ARG_LEFT: + g_value_set_int (value, video_box->box_left); + break; + case ARG_RIGHT: + g_value_set_int (value, video_box->box_right); + break; + case ARG_TOP: + g_value_set_int (value, video_box->box_top); + break; + case ARG_BOTTOM: + g_value_set_int (value, video_box->box_bottom); + break; + case ARG_FILL_TYPE: + g_value_set_enum (value, video_box->fill_type); + break; + case ARG_ALPHA: + g_value_set_double (value, video_box->alpha); + break; + case ARG_BORDER_ALPHA: + g_value_set_double (value, video_box->border_alpha); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstPadLinkReturn +gst_video_box_sink_link (GstPad * pad, const GstCaps * caps) +{ + GstVideoBox *video_box; + GstStructure *structure; + gboolean ret; + + video_box = GST_VIDEO_BOX (gst_pad_get_parent (pad)); + structure = gst_caps_get_structure (caps, 0); + + ret = gst_structure_get_int (structure, "width", &video_box->in_width); + ret &= gst_structure_get_int (structure, "height", &video_box->in_height); + + return GST_PAD_LINK_OK; +} + +#define GST_VIDEO_I420_Y_OFFSET(width,height) (0) +#define GST_VIDEO_I420_U_OFFSET(width,height) ((width)*(height)) +#define GST_VIDEO_I420_V_OFFSET(width,height) ((width)*(height) + ((width/2)*(height/2))) + +#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (width) +#define GST_VIDEO_I420_U_ROWSTRIDE(width) ((width)/2) +#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((width)/2) + +static int yuv_colors_Y[] = { 16, 150, 29 }; +static int yuv_colors_U[] = { 128, 46, 255 }; +static int yuv_colors_V[] = { 128, 21, 107 }; + +static void +gst_video_box_i420 (GstVideoBox * video_box, guint8 * src, guint8 * dest) +{ + guint8 *srcY, *srcU, *srcV; + guint8 *destY, *destU, *destV; + gint crop_width, crop_height; + gint out_width, out_height; + gint src_stride; + gint br, bl, bt, bb; + gint j; + gint color1, 
color2; + + br = video_box->border_right; + bl = video_box->border_left; + bt = video_box->border_top; + bb = video_box->border_bottom; + + out_width = video_box->out_width; + out_height = video_box->out_height; + + destY = dest + GST_VIDEO_I420_Y_OFFSET (out_width, out_height); + + srcY = + src + GST_VIDEO_I420_Y_OFFSET (video_box->in_width, video_box->in_height); + src_stride = GST_VIDEO_I420_Y_ROWSTRIDE (video_box->in_width); + + crop_width = + video_box->in_width - (video_box->crop_left + video_box->crop_right); + crop_height = + video_box->in_height - (video_box->crop_top + video_box->crop_bottom); + + srcY += src_stride * video_box->crop_top + video_box->crop_left; + + color1 = yuv_colors_Y[video_box->fill_type]; + + /* copy Y plane first */ + for (j = 0; j < bt; j++) { + memset (destY, color1, out_width); + destY += out_width; + } + for (j = 0; j < crop_height; j++) { + memset (destY, color1, bl); + destY += bl; + memcpy (destY, srcY, crop_width); + destY += crop_width; + memset (destY, color1, br); + destY += br; + srcY += src_stride; + } + for (j = 0; j < bb; j++) { + memset (destY, color1, out_width); + destY += out_width; + } + + src_stride = GST_VIDEO_I420_U_ROWSTRIDE (video_box->in_width); + + destU = dest + GST_VIDEO_I420_U_OFFSET (out_width, out_height); + destV = dest + GST_VIDEO_I420_V_OFFSET (out_width, out_height); + + crop_width /= 2; + crop_height /= 2; + out_width /= 2; + out_height /= 2; + bb /= 2; + bt /= 2; + br /= 2; + bl /= 2; + + srcU = + src + GST_VIDEO_I420_U_OFFSET (video_box->in_width, video_box->in_height); + srcV = + src + GST_VIDEO_I420_V_OFFSET (video_box->in_width, video_box->in_height); + srcU += src_stride * (video_box->crop_top / 2) + (video_box->crop_left / 2); + srcV += src_stride * (video_box->crop_top / 2) + (video_box->crop_left / 2); + + color1 = yuv_colors_U[video_box->fill_type]; + color2 = yuv_colors_V[video_box->fill_type]; + + for (j = 0; j < bt; j++) { + memset (destU, color1, out_width); + memset (destV, color2, 
out_width); + destU += out_width; + destV += out_width; + } + for (j = 0; j < crop_height; j++) { + memset (destU, color1, bl); + destU += bl; + /* copy U plane */ + memcpy (destU, srcU, crop_width); + destU += crop_width; + memset (destU, color1, br); + destU += br; + srcU += src_stride; + + memset (destV, color2, bl); + destV += bl; + /* copy V plane */ + memcpy (destV, srcV, crop_width); + destV += crop_width; + memset (destV, color2, br); + destV += br; + srcV += src_stride; + } + for (j = 0; j < bb; j++) { + memset (destU, color1, out_width); + memset (destV, color2, out_width); + destU += out_width; + destV += out_width; + } +} + +static void +gst_video_box_ayuv (GstVideoBox * video_box, guint8 * src, guint8 * dest) +{ + guint8 *srcY, *srcU, *srcV; + gint crop_width, crop_width2, crop_height; + gint out_width, out_height; + gint src_stride, src_stride2; + gint br, bl, bt, bb; + gint colorY, colorU, colorV; + gint i, j; + guint8 b_alpha = (guint8) (video_box->border_alpha * 255); + guint8 i_alpha = (guint8) (video_box->alpha * 255); + guint32 *destp = (guint32 *) dest; + guint32 ayuv; + + br = video_box->border_right; + bl = video_box->border_left; + bt = video_box->border_top; + bb = video_box->border_bottom; + + out_width = video_box->out_width; + out_height = video_box->out_height; + + src_stride = GST_VIDEO_I420_Y_ROWSTRIDE (video_box->in_width); + src_stride2 = src_stride / 2; + + crop_width = + video_box->in_width - (video_box->crop_left + video_box->crop_right); + crop_width2 = crop_width / 2; + crop_height = + video_box->in_height - (video_box->crop_top + video_box->crop_bottom); + + srcY = + src + GST_VIDEO_I420_Y_OFFSET (video_box->in_width, video_box->in_height); + srcY += src_stride * video_box->crop_top + video_box->crop_left; + srcU = + src + GST_VIDEO_I420_U_OFFSET (video_box->in_width, video_box->in_height); + srcU += src_stride2 * (video_box->crop_top / 2) + (video_box->crop_left / 2); + srcV = + src + GST_VIDEO_I420_V_OFFSET 
(video_box->in_width, video_box->in_height); + srcV += src_stride2 * (video_box->crop_top / 2) + (video_box->crop_left / 2); + + colorY = yuv_colors_Y[video_box->fill_type]; + colorU = yuv_colors_U[video_box->fill_type]; + colorV = yuv_colors_V[video_box->fill_type]; + + ayuv = + GUINT32_FROM_BE ((b_alpha << 24) | (colorY << 16) | (colorU << 8) | + colorV); + + /* top border */ + for (i = 0; i < bt; i++) { + for (j = 0; j < out_width; j++) { + *destp++ = ayuv; + } + } + for (i = 0; i < crop_height; i++) { + /* left border */ + for (j = 0; j < bl; j++) { + *destp++ = ayuv; + } + dest = (guint8 *) destp; + /* center */ + for (j = 0; j < crop_width2; j++) { + *dest++ = i_alpha; + *dest++ = *srcY++; + *dest++ = *srcU; + *dest++ = *srcV; + *dest++ = i_alpha; + *dest++ = *srcY++; + *dest++ = *srcU++; + *dest++ = *srcV++; + } + if (i % 2 == 0) { + srcU -= crop_width2; + srcV -= crop_width2; + } else { + srcU += src_stride2 - crop_width2; + srcV += src_stride2 - crop_width2; + } + srcY += src_stride - crop_width; + + destp = (guint32 *) dest; + /* right border */ + for (j = 0; j < br; j++) { + *destp++ = ayuv; + } + } + /* bottom border */ + for (i = 0; i < bb; i++) { + for (j = 0; j < out_width; j++) { + *destp++ = ayuv; + } + } +} + +static void +gst_video_box_chain (GstPad * pad, GstData * _data) +{ + GstBuffer *buffer; + GstVideoBox *video_box; + GstBuffer *outbuf; + gint new_width, new_height; + + video_box = GST_VIDEO_BOX (gst_pad_get_parent (pad)); + + if (GST_IS_EVENT (_data)) { + GstEvent *event = GST_EVENT (_data); + + switch (GST_EVENT_TYPE (event)) { + default: + gst_pad_event_default (pad, event); + break; + } + return; + } + + buffer = GST_BUFFER (_data); + + new_width = + video_box->in_width - (video_box->box_left + video_box->box_right); + new_height = + video_box->in_height - (video_box->box_top + video_box->box_bottom); + + if (new_width != video_box->out_width || + new_height != video_box->out_height || + !GST_PAD_CAPS (video_box->srcpad)) { + GstCaps 
*newcaps; + + newcaps = gst_caps_copy (gst_pad_get_negotiated_caps (video_box->sinkpad)); + + video_box->use_alpha = TRUE; + + /* try AYUV first */ + gst_caps_set_simple (newcaps, + "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("AYUV"), + "width", G_TYPE_INT, new_width, "height", G_TYPE_INT, new_height, NULL); + + if (GST_PAD_LINK_FAILED (gst_pad_try_set_caps (video_box->srcpad, newcaps))) { + video_box->use_alpha = FALSE; + newcaps = + gst_caps_copy (gst_pad_get_negotiated_caps (video_box->sinkpad)); + gst_caps_set_simple (newcaps, "format", GST_TYPE_FOURCC, + GST_STR_FOURCC ("I420"), "width", G_TYPE_INT, new_width, "height", + G_TYPE_INT, new_height, NULL); + + if (GST_PAD_LINK_FAILED (gst_pad_try_set_caps (video_box->srcpad, + newcaps))) { + GST_ELEMENT_ERROR (video_box, CORE, NEGOTIATION, (NULL), (NULL)); + return; + } + } + + video_box->out_width = new_width; + video_box->out_height = new_height; + } + + if (video_box->use_alpha) { + outbuf = gst_buffer_new_and_alloc (new_width * new_height * 4); + + gst_video_box_ayuv (video_box, + GST_BUFFER_DATA (buffer), GST_BUFFER_DATA (outbuf)); + } else { + outbuf = gst_buffer_new_and_alloc ((new_width * new_height * 3) / 2); + + gst_video_box_i420 (video_box, + GST_BUFFER_DATA (buffer), GST_BUFFER_DATA (outbuf)); + } + GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer); + GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buffer); + + + gst_buffer_unref (buffer); + + gst_pad_push (video_box->srcpad, GST_DATA (outbuf)); +} + +static GstElementStateReturn +gst_video_box_change_state (GstElement * element) +{ + GstVideoBox *video_box; + + video_box = GST_VIDEO_BOX (element); + + switch (GST_STATE_TRANSITION (element)) { + case GST_STATE_NULL_TO_READY: + break; + case GST_STATE_READY_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_PLAYING: + break; + case GST_STATE_PLAYING_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_READY: + break; + case GST_STATE_READY_TO_NULL: + break; + } + + parent_class->change_state 
(element); + + return GST_STATE_SUCCESS; +} + +static gboolean +plugin_init (GstPlugin * plugin) +{ + return gst_element_register (plugin, "videobox", GST_RANK_NONE, + GST_TYPE_VIDEO_BOX); +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "videobox", + "resizes a video by adding borders or cropping", + plugin_init, VERSION, GST_LICENSE, GST_PACKAGE, GST_ORIGIN) diff --git a/gst/videomixer/Makefile.am b/gst/videomixer/Makefile.am new file mode 100644 index 00000000..ab5f423a --- /dev/null +++ b/gst/videomixer/Makefile.am @@ -0,0 +1,9 @@ +plugindir = $(libdir)/gstreamer-@GST_MAJORMINOR@ + +plugin_LTLIBRARIES = libvideomixer.la + +libvideomixer_la_SOURCES = videomixer.c +libvideomixer_la_CFLAGS = $(GST_CFLAGS) +libvideomixer_la_LIBADD = +libvideomixer_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) + diff --git a/gst/videomixer/README b/gst/videomixer/README new file mode 100644 index 00000000..6794a481 --- /dev/null +++ b/gst/videomixer/README @@ -0,0 +1,27 @@ +Video Mixer +----------- + +A generic video mixer; it blends the ayuv buffers from all pads onto +a new buffer. The new buffer has by default a checkerboard pattern but +its color can be changed with a property. +The mixer can mix streams with different framerates and video sizes. It +uses the duration value of the buffer to schedule the rendering of the +buffers. For streams with a different resolution than the final output +resolution one can specify the position of the top left corner where this +image should be placed with the pad properties xpos and ypos. +The overall alpha value of a stream can also be specified with a pad +property. +By default, the streams are blended in the order that the pads were +requested from the element. This can be overridden by changing the +zorder pad property of the stream, a stream with lower zorder gets +drawn first. + + +TODO +---- + +- really implement zorder +- take I420 yuv as well +- output AYUV if possible. 
+- implement different blend modes, some code is already done +- use filter caps on srcpad to decide on the final output size diff --git a/gst/videomixer/videomixer.c b/gst/videomixer/videomixer.c new file mode 100644 index 00000000..5211ba1a --- /dev/null +++ b/gst/videomixer/videomixer.c @@ -0,0 +1,1116 @@ +/* Generic video mixer plugin + * Copyright (C) 2004 Wim Taymans <wim@fluendo.com> + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include <gst/gst.h> +#include <string.h> + +GST_DEBUG_CATEGORY_STATIC (gst_videomixer_debug); +#define GST_CAT_DEFAULT gst_videomixer_debug + +#define GST_TYPE_VIDEO_MIXER_PAD (gst_videomixer_pad_get_type()) +#define GST_VIDEO_MIXER_PAD(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER_PAD, GstVideoMixerPad)) +#define GST_VIDEO_MIXER_PAD_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER_PAD, GstVideoMixerPadiClass)) +#define GST_IS_VIDEO_MIXER_PAD(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER_PAD)) +#define GST_IS_VIDEO_MIXER_PAD_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER_PAD)) + +typedef struct _GstVideoMixerPad GstVideoMixerPad; +typedef struct _GstVideoMixerPadClass GstVideoMixerPadClass; + +static void gst_videomixer_pad_base_init (gpointer g_class); +static void gst_videomixer_pad_class_init (GstVideoMixerPadClass * klass); +static void gst_videomixer_pad_init (GstVideoMixerPad * mixerpad); + +static void gst_videomixer_pad_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static void gst_videomixer_pad_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); + +#define DEFAULT_PAD_ZORDER 0 +#define DEFAULT_PAD_XPOS 0 +#define DEFAULT_PAD_YPOS 0 +#define DEFAULT_PAD_ALPHA 1.0 +enum +{ + ARG_PAD_0, + ARG_PAD_ZORDER, + ARG_PAD_XPOS, + ARG_PAD_YPOS, + ARG_PAD_ALPHA, +}; + +/* all information needed for one video stream */ +struct _GstVideoMixerPad +{ + GstRealPad parent; /* subclass the pad */ + + GstBuffer *buffer; /* the queued buffer for this pad */ + gboolean eos; + + gint64 queued; + + guint in_width, in_height; + gdouble in_framerate; + + gint xpos, ypos; + guint zorder; + gint blend_mode; + gdouble alpha; +}; + +struct _GstVideoMixerPadClass +{ + GstRealPadClass parent_class; +}; + +GType +gst_videomixer_pad_get_type (void) +{ + static GType 
videomixer_pad_type = 0; + + if (!videomixer_pad_type) { + static const GTypeInfo videomixer_pad_info = { + sizeof (GstVideoMixerPadClass), + gst_videomixer_pad_base_init, + NULL, + (GClassInitFunc) gst_videomixer_pad_class_init, + NULL, + NULL, + sizeof (GstVideoMixerPad), + 0, + (GInstanceInitFunc) gst_videomixer_pad_init, + }; + + videomixer_pad_type = + g_type_register_static (GST_TYPE_REAL_PAD, + "GstVideoMixerPad", &videomixer_pad_info, 0); + } + return videomixer_pad_type; +} + +static void +gst_videomixer_pad_base_init (gpointer g_class) +{ +} + +static void +gst_videomixer_pad_class_init (GstVideoMixerPadClass * klass) +{ + GObjectClass *gobject_class; + + gobject_class = (GObjectClass *) klass; + + gobject_class->set_property = + GST_DEBUG_FUNCPTR (gst_videomixer_pad_set_property); + gobject_class->get_property = + GST_DEBUG_FUNCPTR (gst_videomixer_pad_get_property); + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PAD_ZORDER, + g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture", + 0, 10000, DEFAULT_PAD_ZORDER, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PAD_XPOS, + g_param_spec_int ("xpos", "X Position", "X Position of the picture", + G_MININT, G_MAXINT, DEFAULT_PAD_XPOS, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PAD_YPOS, + g_param_spec_int ("ypos", "Y Position", "Y Position of the picture", + G_MININT, G_MAXINT, DEFAULT_PAD_YPOS, G_PARAM_READWRITE)); + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PAD_ALPHA, + g_param_spec_double ("alpha", "Alpha", "Alpha of the picture", + 0.0, 1.0, DEFAULT_PAD_ALPHA, G_PARAM_READWRITE)); +} + +static const GstEventMask * +gst_videomixer_pad_get_sink_event_masks (GstPad * pad) +{ + static const GstEventMask gst_videomixer_sink_event_masks[] = { + {GST_EVENT_EOS, 0}, + {0,} + }; + + return gst_videomixer_sink_event_masks; +} + +static void +gst_videomixer_pad_get_property (GObject * object, 
guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstVideoMixerPad *pad; + + g_return_if_fail (GST_IS_VIDEO_MIXER_PAD (object)); + + pad = GST_VIDEO_MIXER_PAD (object); + + switch (prop_id) { + case ARG_PAD_ZORDER: + g_value_set_uint (value, pad->zorder); + break; + case ARG_PAD_XPOS: + g_value_set_int (value, pad->xpos); + break; + case ARG_PAD_YPOS: + g_value_set_int (value, pad->ypos); + break; + case ARG_PAD_ALPHA: + g_value_set_double (value, pad->alpha); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_videomixer_pad_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstVideoMixerPad *pad; + + g_return_if_fail (GST_IS_PAD (object)); + + pad = GST_VIDEO_MIXER_PAD (object); + + switch (prop_id) { + case ARG_PAD_ZORDER: + pad->zorder = g_value_get_uint (value); + break; + case ARG_PAD_XPOS: + pad->xpos = g_value_get_int (value); + break; + case ARG_PAD_YPOS: + pad->ypos = g_value_get_int (value); + break; + case ARG_PAD_ALPHA: + pad->alpha = g_value_get_double (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +#define GST_TYPE_VIDEO_MIXER (gst_videomixer_get_type()) +#define GST_VIDEO_MIXER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_MIXER, GstVideoMixer)) +#define GST_VIDEO_MIXER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_MIXER, GstVideoMixerClass)) +#define GST_IS_VIDEO_MIXER(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_MIXER)) +#define GST_IS_VIDEO_MIXER_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_MIXER)) + +typedef struct _GstVideoMixer GstVideoMixer; +typedef struct _GstVideoMixerClass GstVideoMixerClass; + +GType gst_videomixer_get_type (void); + +typedef enum +{ + VIDEO_MIXER_BACKGROUND_CHECKER, + VIDEO_MIXER_BACKGROUND_BLACK, + VIDEO_MIXER_BACKGROUND_WHITE, +} +GstVideoMixerBackground; + +struct _GstVideoMixer 
+{ + GstElement element; + + /* pad */ + GstPad *srcpad; + + /* sinkpads, a GSList of GstVideoMixerPads */ + GSList *sinkpads; + gint numpads; + + /* the master pad */ + GstVideoMixerPad *master; + + gint in_width, in_height; + gint out_width, out_height; + + GstVideoMixerBackground background; + + gdouble in_framerate; +}; + +struct _GstVideoMixerClass +{ + GstElementClass parent_class; +}; + +static GstPadLinkReturn +gst_videomixer_pad_sinkconnect (GstPad * pad, const GstCaps * vscaps) +{ + GstVideoMixer *mix; + GstVideoMixerPad *mixpad; + GstStructure *structure; + + mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad)); + mixpad = GST_VIDEO_MIXER_PAD (pad); + + GST_DEBUG ("videomixer: sinkconnect triggered on %s", gst_pad_get_name (pad)); + + structure = gst_caps_get_structure (vscaps, 0); + + gst_structure_get_int (structure, "width", &mixpad->in_width); + gst_structure_get_int (structure, "height", &mixpad->in_height); + gst_structure_get_double (structure, "framerate", &mixpad->in_framerate); + + mixpad->xpos = 0; + mixpad->ypos = 0; + + mix->in_width = MAX (mix->in_width, mixpad->in_width); + mix->in_height = MAX (mix->in_height, mixpad->in_height); + mix->in_framerate = mixpad->in_framerate; + + return GST_PAD_LINK_OK; +} + +static void +gst_videomixer_pad_link (GstPad * pad, GstPad * peer, gpointer data) +{ + //GstVideoMixer *videomixer = GST_VIDEO_MIXER (data); + const gchar *padname = gst_pad_get_name (pad); + + GST_DEBUG ("pad '%s' connected", padname); +} + +static void +gst_videomixer_pad_unlink (GstPad * pad, GstPad * peer, gpointer data) +{ + //GstVideoMixer *videomixer = GST_VIDEO_MIXER (data); + const gchar *padname = gst_pad_get_name (pad); + + GST_DEBUG ("pad '%s' unlinked", padname); +} + +static void +gst_videomixer_pad_init (GstVideoMixerPad * mixerpad) +{ + g_signal_connect (mixerpad, "linked", + G_CALLBACK (gst_videomixer_pad_link), (gpointer) mixerpad); + g_signal_connect (mixerpad, "unlinked", + G_CALLBACK (gst_videomixer_pad_unlink), 
(gpointer) mixerpad); + + /* setup some pad functions */ + gst_pad_set_link_function (GST_PAD (mixerpad), + gst_videomixer_pad_sinkconnect); + gst_pad_set_event_mask_function (GST_PAD (mixerpad), + gst_videomixer_pad_get_sink_event_masks); + + mixerpad->zorder = DEFAULT_PAD_ZORDER; + mixerpad->xpos = DEFAULT_PAD_XPOS; + mixerpad->ypos = DEFAULT_PAD_YPOS; + mixerpad->alpha = DEFAULT_PAD_ALPHA; +} + + + +/* elementfactory information */ +static GstElementDetails gst_videomixer_details = +GST_ELEMENT_DETAILS ("video mixer", + "Filter/Editor/Video", + "Mix multiple video streams", + "Wim Taymans <wim@fluendo.com>"); + +/* VideoMixer signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +#define DEFAULT_BACKGROUND VIDEO_MIXER_BACKGROUND_CHECKER +enum +{ + ARG_0, + ARG_BACKGROUND, +}; + +#define GST_TYPE_VIDEO_MIXER_BACKGROUND (gst_video_mixer_background_get_type()) +static GType +gst_video_mixer_background_get_type (void) +{ + static GType video_mixer_background_type = 0; + static GEnumValue video_mixer_background[] = { + {VIDEO_MIXER_BACKGROUND_CHECKER, "0", "Checker pattern"}, + {VIDEO_MIXER_BACKGROUND_BLACK, "1", "Black"}, + {VIDEO_MIXER_BACKGROUND_WHITE, "2", "White"}, + {0, NULL, NULL}, + }; + + if (!video_mixer_background_type) { + video_mixer_background_type = + g_enum_register_static ("GstVideoMixerBackground", + video_mixer_background); + } + return video_mixer_background_type; +} + +static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-raw-yuv," + "format = (fourcc) I420," + "width = (int) [ 16, 4096 ]," + "height = (int) [ 16, 4096 ]," "framerate = (double) [ 0, max ]") + ); + +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%d", + GST_PAD_SINK, + GST_PAD_REQUEST, + GST_STATIC_CAPS ("video/x-raw-yuv," + "format = (fourcc) AYUV," + "width = (int) [ 16, 4096 ]," + "height = (int) [ 16, 4096 ]," "framerate = (double) [ 0, max ]") + ); + +static 
void gst_videomixer_base_init (gpointer g_class); +static void gst_videomixer_class_init (GstVideoMixerClass * klass); +static void gst_videomixer_init (GstVideoMixer * videomixer); + +static void gst_videomixer_loop (GstElement * element); +static gboolean gst_videomixer_handle_src_event (GstPad * pad, + GstEvent * event); +static GstPad *gst_videomixer_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * name); +static void gst_videomixer_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_videomixer_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static GstElementStateReturn gst_videomixer_change_state (GstElement * element); + +static GstElementClass *parent_class = NULL; + +/*static guint gst_videomixer_signals[LAST_SIGNAL] = { 0 }; */ + +GType +gst_videomixer_get_type (void) +{ + static GType videomixer_type = 0; + + if (!videomixer_type) { + static const GTypeInfo videomixer_info = { + sizeof (GstVideoMixerClass), + gst_videomixer_base_init, + NULL, + (GClassInitFunc) gst_videomixer_class_init, + NULL, + NULL, + sizeof (GstVideoMixer), + 0, + (GInstanceInitFunc) gst_videomixer_init, + }; + + videomixer_type = + g_type_register_static (GST_TYPE_ELEMENT, "GstVideoMixer", + &videomixer_info, 0); + } + return videomixer_type; +} + +static void +gst_videomixer_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); + + gst_element_class_set_details (element_class, &gst_videomixer_details); +} + +static void +gst_videomixer_class_init (GstVideoMixerClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = 
(GstElementClass *) klass; + + parent_class = g_type_class_ref (GST_TYPE_ELEMENT); + + g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BACKGROUND, + g_param_spec_enum ("background", "Background", "Background type", + GST_TYPE_VIDEO_MIXER_BACKGROUND, + DEFAULT_BACKGROUND, G_PARAM_READWRITE)); + + gstelement_class->request_new_pad = gst_videomixer_request_new_pad; + + gstelement_class->change_state = gst_videomixer_change_state; + + gstelement_class->get_property = gst_videomixer_get_property; + gstelement_class->set_property = gst_videomixer_set_property; +} + +static void +gst_videomixer_init (GstVideoMixer * mix) +{ + GstElementClass *klass = GST_ELEMENT_GET_CLASS (mix); + + mix->srcpad = + gst_pad_new_from_template (gst_element_class_get_pad_template (klass, + "src"), "src"); + gst_pad_set_event_function (mix->srcpad, gst_videomixer_handle_src_event); + gst_element_add_pad (GST_ELEMENT (mix), mix->srcpad); + + GST_FLAG_SET (GST_ELEMENT (mix), GST_ELEMENT_EVENT_AWARE); + + mix->sinkpads = NULL; + mix->background = DEFAULT_BACKGROUND; + mix->in_width = 0; + mix->in_height = 0; + mix->out_width = 0; + mix->out_height = 0; + + gst_element_set_loop_function (GST_ELEMENT (mix), gst_videomixer_loop); +} + +static GstPad * +gst_videomixer_request_new_pad (GstElement * element, + GstPadTemplate * templ, const gchar * req_name) +{ + GstVideoMixer *mix; + GstPad *newpad; + GstElementClass *klass = GST_ELEMENT_GET_CLASS (element); + + g_return_val_if_fail (templ != NULL, NULL); + + if (templ->direction != GST_PAD_SINK) { + g_warning ("videomixer: request pad that is not a SINK pad\n"); + return NULL; + } + + g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), NULL); + + mix = GST_VIDEO_MIXER (element); + + if (templ == gst_element_class_get_pad_template (klass, "sink_%d")) { + gchar *name; + GstVideoMixerPad *mixpad; + + /* create new pad with the name */ + name = g_strdup_printf ("sink_%02d", mix->numpads); + newpad = + gst_pad_custom_new_from_template 
(GST_TYPE_VIDEO_MIXER_PAD, templ, + name); + g_free (name); + + mixpad = GST_VIDEO_MIXER_PAD (newpad); + + mixpad->zorder = mix->numpads; + mix->numpads++; + if (mix->numpads == 1) { + mix->master = mixpad; + } + mix->sinkpads = g_slist_append (mix->sinkpads, newpad); + } else { + g_warning ("videomixer: this is not our template!\n"); + return NULL; + } + + /* dd the pad to the element */ + gst_element_add_pad (element, newpad); + + return newpad; +} + +/* handle events */ +static gboolean +gst_videomixer_handle_src_event (GstPad * pad, GstEvent * event) +{ + GstVideoMixer *mix; + GstEventType type; + + mix = GST_VIDEO_MIXER (gst_pad_get_parent (pad)); + + type = event ? GST_EVENT_TYPE (event) : GST_EVENT_UNKNOWN; + + switch (type) { + case GST_EVENT_SEEK: + /* disable seeking for now */ + return FALSE; + default: + break; + } + + return gst_pad_event_default (pad, event); +} + +#define BLEND_NORMAL(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = ((Y1*(255-alpha))+(Y2*alpha))>>8; \ + U = ((U1*(255-alpha))+(U2*alpha))>>8; \ + V = ((V1*(255-alpha))+(V2*alpha))>>8; + +#define BLEND_ADD(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = Y1+((Y2*alpha)>>8); \ + U = U1+(((127*(255-alpha)+(U2*alpha)))>>8)-127; \ + V = V1+(((127*(255-alpha)+(V2*alpha)))>>8)-127; \ + if (Y>255) { \ + gint mult = MAX (0, 288-Y); \ + U = ((U*mult) + (127*(32-mult)))>>5; \ + V = ((V*mult) + (127*(32-mult)))>>5; \ + Y = 255; \ + } \ + U = MIN (U,255) \ + V = MIN (V,255) + +#define BLEND_SUBTRACT(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = Y1-((Y2*alpha)>>8); \ + U = U1+(((127*(255-alpha)+(U2*alpha)))>>8)-127; \ + V = V1+(((127*(255-alpha)+(V2*alpha)))>>8)-127; \ + if (Y<0) { \ + gint mult = MIN (32, -Y); \ + U = ((U*(32-mult)) + (127*mult))>>5; \ + V = ((V*(32-mult)) + (127*mult))>>5; \ + Y = 0; \ + } + +#define BLEND_DARKEN(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + if (Y1 < Y2) { \ + Y = Y1; U = U1; V = V1; \ + } \ + else { \ + Y = ((Y1*(255-alpha))+(Y2*alpha))>>8; \ + U = ((U1*(255-alpha))+(U2*alpha))>>8; \ + V = 
((V1*(255-alpha))+(V2*alpha))>>8; \ + } + +#define BLEND_LIGHTEN(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + if (Y1 > Y2) { \ + Y = Y1; U = U1; V = V1; \ + } \ + else { \ + Y = ((Y1*(255-alpha))+(Y2*alpha))>>8; \ + U = ((U1*(255-alpha))+(U2*alpha))>>8; \ + V = ((V1*(255-alpha))+(V2*alpha))>>8; \ + } + +#define BLEND_MULTIPLY(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = (Y1*(256*(255-alpha) +(Y2*alpha)))>>16; \ + U = ((U1*(255-alpha)*256)+(alpha*(U1*Y2+128*(256-Y2))))>>16; \ + V = ((V1*(255-alpha)*256)+(alpha*(V1*Y2+128*(256-Y2))))>>16; + +#define BLEND_DIFFERENCE(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = ABS((gint)Y1-(gint)Y2)+127; \ + U = ABS((gint)U1-(gint)U2)+127; \ + V = ABS((gint)V1-(gint)V2)+127; \ + Y = ((Y*alpha)+(Y1*(255-alpha)))>>8; \ + U = ((U*alpha)+(U1*(255-alpha)))>>8; \ + V = ((V*alpha)+(V1*(255-alpha)))>>8; \ + if (Y>255) { \ + gint mult = MAX (0, 288-Y); \ + U = ((U*mult) + (127*(32-mult)))>>5; \ + V = ((V*mult) + (127*(32-mult)))>>5; \ + Y = 255; \ + } else if (Y<0) { \ + gint mult = MIN (32, -Y); \ + U = ((U*(32-mult)) + (127*mult))>>5; \ + V = ((V*(32-mult)) + (127*mult))>>5; \ + Y = 0; \ + } \ + U = CLAMP(U, 0, 255); \ + V = CLAMP(V, 0, 255); + +#define BLEND_EXCLUSION(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = ((gint)(Y1^0xff)*Y2+(gint)(Y2^0xff)*Y1)>>8; \ + U = ((gint)(U1^0xff)*Y2+(gint)(Y2^0xff)*U1)>>8; \ + V = ((gint)(V1^0xff)*Y2+(gint)(Y2^0xff)*V1)>>8; \ + Y = ((Y*alpha)+(Y1*(255-alpha)))>>8; \ + U = ((U*alpha)+(U1*(255-alpha)))>>8; \ + V = ((V*alpha)+(V1*(255-alpha)))>>8; \ + if (Y>255) { \ + gint mult = MAX (0, 288-Y); \ + U = ((U*mult) + (127*(32-mult)))>>5; \ + V = ((V*mult) + (127*(32-mult)))>>5; \ + Y = 255; \ + } else if (Y<0) { \ + gint mult = MIN (32, -Y); \ + U = ((U*(32-mult)) + (127*mult))>>5; \ + V = ((V*(32-mult)) + (127*mult))>>5; \ + Y = 0; \ + } \ + U = CLAMP(U, 0, 255); \ + V = CLAMP(V, 0, 255); + +#define BLEND_SOFTLIGHT(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = (gint)Y1+(gint)Y2 - 127; \ + U = (gint)U1+(gint)U2 - 127; \ + V = (gint)V1+(gint)V2 - 
127; \ + Y = ((Y*alpha)+(Y1*(255-alpha)))>>8; \ + U = ((U*alpha)+(U1*(255-alpha)))>>8; \ + V = ((V*alpha)+(V1*(255-alpha)))>>8; \ + if (Y>255) { \ + gint mult = MAX (0, 288-Y); \ + U = ((U*mult) + (127*(32-mult)))>>5; \ + V = ((V*mult) + (127*(32-mult)))>>5; \ + Y = 255; \ + } else if (Y<0) { \ + gint mult = MIN (32, -Y); \ + U = ((U*(32-mult)) + (127*mult))>>5; \ + V = ((V*(32-mult)) + (127*mult))>>5; \ + Y = 0; \ + } \ + +#define BLEND_HARDLIGHT(Y1,U1,V1,Y2,U2,V2,alpha,Y,U,V) \ + Y = (gint)Y1+(gint)Y2*2 - 255; \ + U = (gint)U1+(gint)U2 - 127; \ + V = (gint)V1+(gint)V2 - 127; \ + Y = ((Y*alpha)+(Y1*(255-alpha)))>>8; \ + U = ((U*alpha)+(U1*(255-alpha)))>>8; \ + V = ((V*alpha)+(V1*(255-alpha)))>>8; \ + if (Y>255) { \ + gint mult = MAX (0, 288-Y); \ + U = ((U*mult) + (127*(32-mult)))>>5; \ + V = ((V*mult) + (127*(32-mult)))>>5; \ + Y = 255; \ + } else if (Y<0) { \ + gint mult = MIN (32, -Y); \ + U = ((U*(32-mult)) + (127*mult))>>5; \ + V = ((V*(32-mult)) + (127*mult))>>5; \ + Y = 0; \ + } \ + +#define BLEND_MODE BLEND_NORMAL +#if 0 +#define BLEND_MODE BLEND_ADD +#define BLEND_MODE BLEND_SUBTRACT +#define BLEND_MODE BLEND_DARKEN +#define BLEND_MODE BLEND_LIGHTEN +#define BLEND_MODE BLEND_MULTIPLY +#define BLEND_MODE BLEND_DIFFERENCE +#define BLEND_MODE BLEND_EXCLUSION +#define BLEND_MODE BLEND_SOFTLIGHT +#define BLEND_MODE BLEND_HARDLIGHT +#endif + +/* note that this function does packing conversion and blending at the + * same time */ +static void +gst_videomixer_blend_ayuv_i420 (guint8 * src, gint xpos, gint ypos, + gint src_width, gint src_height, gdouble src_alpha, + guint8 * dest, gint dest_width, gint dest_height) +{ + gint dest_size; + gint alpha, b_alpha; + guint8 *destY1, *destY2, *destU, *destV; + gint accumU; + gint accumV; + gint i, j; + gint src_stride; + gint src_add, destY_add, destC_add; + guint8 *src1, *src2; + gint Y, U, V; + + src_stride = src_width * 4; + dest_size = dest_width * dest_height; + + b_alpha = (gint) (src_alpha * 255); + + /* adjust 
src pointers for negative sizes */ + if (xpos < 0) { + src += -xpos * 4; + src_width -= -xpos; + xpos = 0; + } + if (ypos < 0) { + src += -ypos * src_stride; + src_height -= -ypos; + ypos = 0; + } + /* adjust width/height if the src is bigger than dest */ + if (xpos + src_width > dest_width) { + src_width = dest_width - xpos; + } + if (ypos + src_height > dest_height) { + src_height = dest_height - ypos; + } + + src_add = 2 * src_stride - (4 * src_width); + destY_add = 2 * dest_width - (src_width); + destC_add = dest_width / 2 - (src_width / 2); + + destY1 = dest + xpos + (ypos * dest_width); + destY2 = destY1 + dest_width; + destU = dest + dest_size + xpos / 2 + (ypos / 2 * dest_width / 2); + destV = destU + dest_size / 4; + + src1 = src; + src2 = src + src_stride; + + /* we convert a square of 2x2 samples to generate 4 Luma and 2 chroma samples */ + for (i = 0; i < src_height / 2; i++) { + for (j = 0; j < src_width / 2; j++) { + alpha = (src1[0] * b_alpha) >> 8; + BLEND_MODE (destY1[0], destU[0], destV[0], src1[1], src1[2], src1[3], + alpha, Y, U, V); + destY1[0] = Y; + accumU = U; + accumV = V; + alpha = (src1[4] * b_alpha) >> 8; + BLEND_MODE (destY1[1], destU[0], destV[0], src1[5], src1[6], src1[7], + alpha, Y, U, V); + destY1[1] = Y; + accumU += U; + accumV += V; + alpha = (src2[0] * b_alpha) >> 8; + BLEND_MODE (destY2[0], destU[0], destV[0], src2[1], src2[2], src2[3], + alpha, Y, U, V); + destY2[0] = Y; + accumU += U; + accumV += V; + alpha = (src2[4] * b_alpha) >> 8; + BLEND_MODE (destY2[1], destU[0], destV[0], src2[5], src2[6], src2[7], + alpha, Y, U, V); + destY2[1] = Y; + accumU += U; + accumV += V; + + /* take the average of the 4 chroma samples to get the final value */ + destU[0] = accumU / 4; + destV[0] = accumV / 4; + + src1 += 8; + src2 += 8; + destY1 += 2; + destY2 += 2; + destU += 1; + destV += 1; + } + src1 += src_add; + src2 += src_add; + destY1 += destY_add; + destY2 += destY_add; + destU += destC_add; + destV += destC_add; + } +} + +#undef 
BLEND_MODE + +/* fill a buffer with a checkerboard pattern */ +static void +gst_videomixer_fill_checker (guint8 * dest, gint width, gint height) +{ + gint size = width * height; + gint i, j; + static int tab[] = { 80, 160, 80, 160 }; + + for (i = 0; i < height; i++) { + for (j = 0; j < width; j++) { + *dest++ = tab[((i & 0x8) >> 3) + ((j & 0x8) >> 3)]; + } + } + memset (dest, 128, size / 2); +} + +static void +gst_videomixer_fill_color (guint8 * dest, gint width, gint height, + gint colY, gint colU, gint colV) +{ + gint size = width * height; + + memset (dest, colY, size); + memset (dest + size, colU, size / 4); + memset (dest + size + size / 4, colV, size / 4); +} + +/* try to get a buffer on all pads. As long as the queued value is + * negative, we skip buffers */ +static gboolean +gst_videomixer_fill_queues (GstVideoMixer * mix) +{ + GSList *walk; + gboolean eos = TRUE; + + /* loop over all pads and fill it with a buffer */ + walk = mix->sinkpads; + while (walk) { + GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data); + + walk = g_slist_next (walk); + + GST_DEBUG ("looking at pad %s", gst_pad_get_name (GST_PAD (pad))); + + /* don't care about eos pads */ + if (pad->eos) { + GST_DEBUG ("pad %s in eos, skipping", gst_pad_get_name (GST_PAD (pad))); + continue; + } + + GST_DEBUG ("pad %s: buffer %p, queued %lld ", + gst_pad_get_name (GST_PAD (pad)), pad->buffer, pad->queued); + + /* this pad is in need of a new buffer */ + if (pad->buffer == NULL) { + GstData *data; + GstBuffer *buffer; + + /* as long as not enough buffers have been queued */ + while (pad->queued <= 0 && !pad->eos) { + data = gst_pad_pull (GST_PAD (pad)); + if (GST_IS_EVENT (data)) { + GstEvent *event = GST_EVENT (data); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + GST_DEBUG ("videomixer: EOS on pad %s", + gst_pad_get_name (GST_PAD (pad))); + /* mark pad eos */ + pad->eos = TRUE; + gst_event_unref (event); + break; + default: + gst_pad_event_default (GST_PAD (pad), GST_EVENT 
(data)); + } + } else { + guint64 duration; + + buffer = GST_BUFFER (data); + duration = GST_BUFFER_DURATION (buffer); + /* no duration on the buffer, use the framerate */ + if (duration == -1) + duration = GST_SECOND / pad->in_framerate; + pad->queued += duration; + /* this buffer will need to be mixed */ + if (pad->queued > 0) { + pad->buffer = buffer; + } else { + /* skip buffer, it's too old */ + gst_buffer_unref (buffer); + } + } + GST_DEBUG ("pad %s: in loop, buffer %p, queued %lld ", + gst_pad_get_name (GST_PAD (pad)), pad->buffer, pad->queued); + } + } + if (pad->buffer != NULL) { + /* got a buffer somewhere so were not eos */ + eos = FALSE; + } + } + return eos; +} + +/* blend all buffers present on the pads */ +static void +gst_videomixer_blend_buffers (GstVideoMixer * mix, GstBuffer * outbuf) +{ + GSList *walk; + + walk = mix->sinkpads; + while (walk) { + GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data); + + walk = g_slist_next (walk); + + if (pad->buffer != NULL) { + gst_videomixer_blend_ayuv_i420 (GST_BUFFER_DATA (pad->buffer), + pad->xpos, pad->ypos, + pad->in_width, pad->in_height, + pad->alpha, + GST_BUFFER_DATA (outbuf), mix->out_width, mix->out_height); + } + } +} + +/* remove buffers from the queue that were expired in the + * interval of the master, we also prepare the queued value + * in the pad so that we can skip and fill buffers later on */ +static void +gst_videomixer_update_queues (GstVideoMixer * mix) +{ + GSList *walk; + guint64 interval; + + interval = mix->master->queued; + if (interval <= 0) { + interval = GST_SECOND / mix->in_framerate; + } + + walk = mix->sinkpads; + while (walk) { + GstVideoMixerPad *pad = GST_VIDEO_MIXER_PAD (walk->data); + + walk = g_slist_next (walk); + + if (pad->buffer != NULL) { + pad->queued -= interval; + GST_DEBUG ("queued now %s %lld", gst_pad_get_name (GST_PAD (pad)), + pad->queued); + if (pad->queued <= 0) { + gst_buffer_unref (pad->buffer); + pad->buffer = NULL; + } + } + } +} + +/* + * The 
basic idea is to get a buffer on all pads and mix them together. + * Based on the framerate, buffers are removed from the queues to make room + * for a new buffer. + */ +static void +gst_videomixer_loop (GstElement * element) +{ + GstVideoMixer *mix; + GstBuffer *outbuf; + gint outsize; + gint new_width, new_height; + gboolean eos; + + mix = GST_VIDEO_MIXER (element); + + eos = gst_videomixer_fill_queues (mix); + if (eos) { + gst_pad_push (mix->srcpad, GST_DATA (gst_event_new (GST_EVENT_EOS))); + gst_element_set_eos (GST_ELEMENT (mix)); + return; + } + + new_width = mix->in_width; + new_height = mix->in_height; + + if (new_width != mix->out_width || + new_height != mix->out_height || !GST_PAD_CAPS (mix->srcpad)) { + GstCaps *newcaps; + + newcaps = + gst_caps_copy (gst_pad_get_negotiated_caps (GST_PAD (mix->master))); + gst_caps_set_simple (newcaps, "format", GST_TYPE_FOURCC, + GST_STR_FOURCC ("I420"), "width", G_TYPE_INT, new_width, "height", + G_TYPE_INT, new_height, NULL); + + if (!gst_pad_try_set_caps (mix->srcpad, newcaps)) { + GST_ELEMENT_ERROR (mix, CORE, NEGOTIATION, (NULL), (NULL)); + return; + } + + mix->out_width = new_width; + mix->out_height = new_height; + } + + outsize = 3 * (mix->out_width * mix->out_height) / 2; + outbuf = gst_pad_alloc_buffer (mix->srcpad, GST_BUFFER_OFFSET_NONE, outsize); + switch (mix->background) { + case VIDEO_MIXER_BACKGROUND_CHECKER: + gst_videomixer_fill_checker (GST_BUFFER_DATA (outbuf), + new_width, new_height); + break; + case VIDEO_MIXER_BACKGROUND_BLACK: + gst_videomixer_fill_color (GST_BUFFER_DATA (outbuf), + new_width, new_height, 16, 128, 128); + break; + case VIDEO_MIXER_BACKGROUND_WHITE: + gst_videomixer_fill_color (GST_BUFFER_DATA (outbuf), + new_width, new_height, 240, 128, 128); + break; + } + + gst_videomixer_blend_buffers (mix, outbuf); + + gst_videomixer_update_queues (mix); + + gst_pad_push (mix->srcpad, GST_DATA (outbuf)); +} + +static void +gst_videomixer_get_property (GObject * object, + guint prop_id, 
GValue * value, GParamSpec * pspec) +{ + GstVideoMixer *mix = GST_VIDEO_MIXER (object); + + switch (prop_id) { + case ARG_BACKGROUND: + g_value_set_enum (value, mix->background); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_videomixer_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstVideoMixer *mix = GST_VIDEO_MIXER (object); + + switch (prop_id) { + case ARG_BACKGROUND: + mix->background = g_value_get_enum (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstElementStateReturn +gst_videomixer_change_state (GstElement * element) +{ + GstVideoMixer *mix; + gint transition = GST_STATE_TRANSITION (element); + + g_return_val_if_fail (GST_IS_VIDEO_MIXER (element), GST_STATE_FAILURE); + + mix = GST_VIDEO_MIXER (element); + + switch (transition) { + case GST_STATE_NULL_TO_READY: + case GST_STATE_READY_TO_PAUSED: + break; + case GST_STATE_PAUSED_TO_PLAYING: + case GST_STATE_PLAYING_TO_PAUSED: + case GST_STATE_PAUSED_TO_READY: + case GST_STATE_READY_TO_NULL: + break; + } + + if (GST_ELEMENT_CLASS (parent_class)->change_state) + return GST_ELEMENT_CLASS (parent_class)->change_state (element); + + return GST_STATE_SUCCESS; +} + +static gboolean +plugin_init (GstPlugin * plugin) +{ + GST_DEBUG_CATEGORY_INIT (gst_videomixer_debug, "videomixer", 0, + "video mixer"); + + return gst_element_register (plugin, "videomixer", GST_RANK_PRIMARY, + GST_TYPE_VIDEO_MIXER); +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "videomixer", + "Video mixer", plugin_init, VERSION, GST_LICENSE, GST_PACKAGE, GST_ORIGIN) |