Diffstat (limited to 'gst')
-rw-r--r--  gst/debug/Makefile.am                   8
-rw-r--r--  gst/debug/gstnavigationtest.c         302
-rw-r--r--  gst/debug/gstnavigationtest.h          19
-rw-r--r--  gst/effectv/Makefile.am                 2
-rw-r--r--  gst/effectv/gstaging.c                253
-rw-r--r--  gst/effectv/gstdice.c                 307
-rw-r--r--  gst/effectv/gstedge.c                 263
-rw-r--r--  gst/effectv/gsteffectv.c               34
-rw-r--r--  gst/effectv/gsteffectv.h                3
-rw-r--r--  gst/effectv/gstquark.c                328
-rw-r--r--  gst/effectv/gstrev.c                  251
-rw-r--r--  gst/effectv/gstshagadelic.c           258
-rw-r--r--  gst/effectv/gstvertigo.c              304
-rw-r--r--  gst/effectv/gstwarp.c                 315
-rw-r--r--  gst/videofilter/Makefile.am            16
-rw-r--r--  gst/videofilter/gstvideobalance.c      12
-rw-r--r--  gst/videofilter/gstvideobalance.h       4
-rw-r--r--  gst/videofilter/gstvideofilter.c      484
-rw-r--r--  gst/videofilter/gstvideofilter.h       59
-rw-r--r--  gst/videofilter/gstvideoflip.c        613
-rw-r--r--  gst/videofilter/gstvideoflip.h         13
21 files changed, 1699 insertions, 2149 deletions
diff --git a/gst/debug/Makefile.am b/gst/debug/Makefile.am
index da3b3a29..14bb3699 100644
--- a/gst/debug/Makefile.am
+++ b/gst/debug/Makefile.am
@@ -14,8 +14,12 @@ libgstefence_la_LIBADD = $(GST_LIBS)
libgstefence_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstnavigationtest_la_SOURCES = gstnavigationtest.c
-libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) -I$(top_srcdir)/gst/videofilter
-libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
+libgstnavigationtest_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ -I$(top_srcdir)/gst/videofilter
+libgstnavigationtest_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS) \
+ $(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgstnavigationtest_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdebug_la_SOURCES = \
diff --git a/gst/debug/gstnavigationtest.c b/gst/debug/gstnavigationtest.c
index 220947a8..bc40ae58 100644
--- a/gst/debug/gstnavigationtest.c
+++ b/gst/debug/gstnavigationtest.c
@@ -18,122 +18,40 @@
* Boston, MA 02111-1307, USA.
*/
-/*
- * This file was (probably) generated from gstnavigationtest.c,
- * gstnavigationtest.c,v 1.7 2003/11/08 02:48:59 dschleef Exp
- */
-
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-#include <gstnavigationtest.h>
+#include "gstnavigationtest.h"
#include <string.h>
#include <math.h>
-typedef struct
-{
- double x;
- double y;
- gint images_left;
- guint8 cy, cu, cv;
-} ButtonClick;
-
-static void gst_navigationtest_base_init (gpointer g_class);
-static void gst_navigationtest_class_init (gpointer g_class,
- gpointer class_data);
-static void gst_navigationtest_init (GTypeInstance * instance,
- gpointer g_class);
-
-static gboolean gst_navigationtest_handle_src_event (GstPad * pad,
- GstEvent * event);
-
-static GstStateChangeReturn
-gst_navigationtest_change_state (GstElement * element,
- GstStateChange transition);
-
-static void gst_navigationtest_planar411 (GstVideofilter * videofilter,
- void *dest, void *src);
-static void gst_navigationtest_setup (GstVideofilter * videofilter);
-
-static GstVideofilterClass *parent_class; /* NULL */
-
-GType
-gst_navigationtest_get_type (void)
-{
- static GType navigationtest_type = 0;
-
- if (!navigationtest_type) {
- static const GTypeInfo navigationtest_info = {
- sizeof (GstNavigationtestClass),
- gst_navigationtest_base_init,
- NULL,
- gst_navigationtest_class_init,
- NULL,
- NULL,
- sizeof (GstNavigationtest),
- 0,
- gst_navigationtest_init,
- };
-
- navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
- "GstNavigationtest", &navigationtest_info, 0);
- }
- return navigationtest_type;
-}
-
-static GstVideofilterFormat gst_navigationtest_formats[] = {
- {"I420", 12, gst_navigationtest_planar411,},
-};
-
-
-static void
-gst_navigationtest_base_init (gpointer g_class)
-{
- static GstElementDetails navigationtest_details =
- GST_ELEMENT_DETAILS ("Video Filter Template",
- "Filter/Video",
- "Template for a video filter",
- "David Schleef <ds@schleef.org>");
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+#include <gst/video/video.h>
- gst_element_class_set_details (element_class, &navigationtest_details);
+GST_DEBUG_CATEGORY (navigationtest_debug);
+#define GST_CAT_DEFAULT navigationtest_debug
- for (i = 0; i < G_N_ELEMENTS (gst_navigationtest_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_navigationtest_formats + i);
- }
+static GstElementDetails navigationtest_details =
+GST_ELEMENT_DETAILS ("Video Navigation test",
+ "Filter/Effect/Video",
+ "Handle navigation events showing a black square following mouse pointer",
+ "David Schleef <ds@schleef.org>");
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
-}
+static GstStaticPadTemplate gst_navigationtest_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ );
-static void
-gst_navigationtest_class_init (gpointer g_class, gpointer class_data)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
+static GstStaticPadTemplate gst_navigationtest_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420"))
+ );
- parent_class = g_type_class_peek_parent (g_class);
-
- element_class->change_state = gst_navigationtest_change_state;
-
- videofilter_class->setup = gst_navigationtest_setup;
-}
-
-static void
-gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
-{
- GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
- GstVideofilter *videofilter = GST_VIDEOFILTER (navtest);
-
- gst_pad_set_event_function (videofilter->srcpad,
- GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
-
- navtest->x = -1;
- navtest->y = -1;
-}
+static GstVideofilterClass *parent_class = NULL;
static gboolean
gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
@@ -149,10 +67,8 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
const GstStructure *s = gst_event_get_structure (event);
gint fps_n, fps_d;
- fps_n = gst_value_get_fraction_numerator (
- (&GST_VIDEOFILTER (navtest)->framerate));
- fps_d = gst_value_get_fraction_denominator (
- (&GST_VIDEOFILTER (navtest)->framerate));
+ fps_n = gst_value_get_fraction_numerator ((&navtest->framerate));
+ fps_d = gst_value_get_fraction_denominator ((&navtest->framerate));
type = gst_structure_get_string (s, "event");
if (g_str_equal (type, "mouse-move")) {
@@ -189,16 +105,63 @@ gst_navigationtest_handle_src_event (GstPad * pad, GstEvent * event)
return gst_pad_event_default (pad, event);
}
-static void
-gst_navigationtest_setup (GstVideofilter * videofilter)
+/* Useful macros */
+#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
+#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
+#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
+
+#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
+#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
+#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+static gboolean
+gst_navigationtest_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GstNavigationtest *navigationtest;
+ GstNavigationtest *navtest;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter));
- navigationtest = GST_NAVIGATIONTEST (videofilter);
+ navtest = GST_NAVIGATIONTEST (btrans);
- /* if any setup needs to be done, do it here */
+ structure = gst_caps_get_structure (caps, 0);
+
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = GST_VIDEO_I420_SIZE (width, height);
+ ret = TRUE;
+ GST_DEBUG_OBJECT (navtest, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
+ return ret;
+}
+
+static gboolean
+gst_navigationtest_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
+{
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (btrans);
+ gboolean ret = FALSE;
+ GstStructure *structure;
+
+ structure = gst_caps_get_structure (incaps, 0);
+
+ if (gst_structure_get_int (structure, "width", &navtest->width) &&
+ gst_structure_get_int (structure, "height", &navtest->height)) {
+ const GValue *framerate;
+
+ framerate = gst_structure_get_value (structure, "framerate");
+ if (framerate && GST_VALUE_HOLDS_FRACTION (framerate)) {
+ g_value_copy (framerate, &navtest->framerate);
+ ret = TRUE;
+ }
+ }
+
+ return ret;
}
static void
@@ -242,37 +205,37 @@ draw_box_planar411 (guint8 * dest, int width, int height, int x, int y,
}
}
-static void
-gst_navigationtest_planar411 (GstVideofilter * videofilter,
- void *dest, void *src)
+static GstFlowReturn
+gst_navigationtest_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
{
- GstNavigationtest *navtest = (GstNavigationtest *) videofilter;
- gint width, height;
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (trans);
GSList *walk;
-
- g_return_if_fail (GST_IS_NAVIGATIONTEST (videofilter));
-
- width = gst_videofilter_get_input_width (videofilter);
- height = gst_videofilter_get_input_height (videofilter);
+ GstFlowReturn ret = GST_FLOW_OK;
/* do something interesting here. This simply copies the source
* to the destination. */
- memcpy (dest, src, width * height + (width / 2) * (height / 2) * 2);
+ gst_buffer_stamp (out, in);
+
+ memcpy (GST_BUFFER_DATA (out), GST_BUFFER_DATA (in),
+ MIN (GST_BUFFER_SIZE (in), GST_BUFFER_SIZE (out)));
walk = navtest->clicks;
while (walk) {
ButtonClick *click = walk->data;
walk = g_slist_next (walk);
- draw_box_planar411 (dest, width, height, rint (click->x),
- rint (click->y), click->cy, click->cu, click->cv);
+ draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
+ rint (click->x), rint (click->y), click->cy, click->cu, click->cv);
if (--click->images_left < 1) {
navtest->clicks = g_slist_remove (navtest->clicks, click);
g_free (click);
}
}
- draw_box_planar411 (dest, width, height, rint (navtest->x),
- rint (navtest->y), 0, 128, 128);
+ draw_box_planar411 (GST_BUFFER_DATA (out), navtest->width, navtest->height,
+ rint (navtest->x), rint (navtest->y), 0, 128, 128);
+
+ return ret;
}
static GstStateChangeReturn
@@ -282,12 +245,6 @@ gst_navigationtest_change_state (GstElement * element,
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstNavigationtest *navtest = GST_NAVIGATIONTEST (element);
- /* upwards state changes */
- switch (transition) {
- default:
- break;
- }
-
if (GST_ELEMENT_CLASS (parent_class)->change_state)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
@@ -307,9 +264,84 @@ gst_navigationtest_change_state (GstElement * element,
return ret;
}
+static void
+gst_navigationtest_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &navigationtest_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_navigationtest_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_navigationtest_src_template));
+}
+
+static void
+gst_navigationtest_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ element_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_navigationtest_change_state);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_navigationtest_set_caps);
+ trans_class->get_unit_size =
+ GST_DEBUG_FUNCPTR (gst_navigationtest_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_navigationtest_transform);
+}
+
+static void
+gst_navigationtest_init (GTypeInstance * instance, gpointer g_class)
+{
+ GstNavigationtest *navtest = GST_NAVIGATIONTEST (instance);
+ GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);
+
+ gst_pad_set_event_function (btrans->srcpad,
+ GST_DEBUG_FUNCPTR (gst_navigationtest_handle_src_event));
+
+ navtest->x = -1;
+ navtest->y = -1;
+}
+
+GType
+gst_navigationtest_get_type (void)
+{
+ static GType navigationtest_type = 0;
+
+ if (!navigationtest_type) {
+ static const GTypeInfo navigationtest_info = {
+ sizeof (GstNavigationtestClass),
+ gst_navigationtest_base_init,
+ NULL,
+ gst_navigationtest_class_init,
+ NULL,
+ NULL,
+ sizeof (GstNavigationtest),
+ 0,
+ gst_navigationtest_init,
+ };
+
+ navigationtest_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+ "GstNavigationtest", &navigationtest_info, 0);
+ }
+ return navigationtest_type;
+}
+
static gboolean
plugin_init (GstPlugin * plugin)
{
+ GST_DEBUG_CATEGORY_INIT (navigationtest_debug, "navigationtest", 0,
+ "navigationtest");
+
return gst_element_register (plugin, "navigationtest", GST_RANK_NONE,
GST_TYPE_NAVIGATIONTEST);
}
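
The I420 rowstride/offset macros added above are what determine the frame size that gst_navigationtest_get_unit_size() reports. Below is a minimal standalone sketch of that arithmetic, assuming nothing beyond libc: the GST_ROUND_UP_N macros are re-defined locally rather than taken from the GStreamer headers, and 320x240 is just an example resolution.

    #include <stdio.h>

    /* local stand-ins for GStreamer's GST_ROUND_UP_N macros */
    #define ROUND_UP_2(x) (((x) + 1) & ~1)
    #define ROUND_UP_4(x) (((x) + 3) & ~3)
    #define ROUND_UP_8(x) (((x) + 7) & ~7)

    static unsigned int
    i420_size (int w, int h)
    {
      unsigned int y_stride = ROUND_UP_4 (w);
      unsigned int u_stride = ROUND_UP_8 (w) / 2;
      unsigned int v_stride = ROUND_UP_8 (y_stride) / 2;

      unsigned int y_size = y_stride * ROUND_UP_2 (h);
      unsigned int u_size = u_stride * ROUND_UP_2 (h) / 2;
      unsigned int v_size = v_stride * ROUND_UP_2 (h) / 2;

      return y_size + u_size + v_size;
    }

    int
    main (void)
    {
      /* 320x240: 76800 (Y) + 19200 (U) + 19200 (V) = 115200 bytes */
      printf ("%u\n", i420_size (320, 240));
      return 0;
    }

For widths that are multiples of 8 and even heights this reduces to width * height * 3 / 2; the round-ups only matter for odd sizes.
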
diff --git a/gst/debug/gstnavigationtest.h b/gst/debug/gstnavigationtest.h
index 4211fc3c..98abcd81 100644
--- a/gst/debug/gstnavigationtest.h
+++ b/gst/debug/gstnavigationtest.h
@@ -21,12 +21,8 @@
#ifndef __GST_NAVIGATIONTEST_H__
#define __GST_NAVIGATIONTEST_H__
-
-#include <gst/gst.h>
-
#include "gstvideofilter.h"
-
G_BEGIN_DECLS
#define GST_TYPE_NAVIGATIONTEST \
@@ -43,11 +39,21 @@ G_BEGIN_DECLS
typedef struct _GstNavigationtest GstNavigationtest;
typedef struct _GstNavigationtestClass GstNavigationtestClass;
+typedef struct
+{
+ gdouble x;
+ gdouble y;
+ gint images_left;
+ guint8 cy, cu, cv;
+} ButtonClick;
+
struct _GstNavigationtest {
GstVideofilter videofilter;
- double x;
- double y;
+ gint width, height;
+
+ GValue framerate;
+ gdouble x, y;
GSList *clicks;
};
@@ -61,4 +67,3 @@ GType gst_navigationtest_get_type(void);
G_END_DECLS
#endif /* __GST_NAVIGATIONTEST_H__ */
-
diff --git a/gst/effectv/Makefile.am b/gst/effectv/Makefile.am
index 1413526e..2bdf7b77 100644
--- a/gst/effectv/Makefile.am
+++ b/gst/effectv/Makefile.am
@@ -5,10 +5,12 @@ libgsteffectv_la_SOURCES = \
gstshagadelic.c gstvertigo.c gstrev.c gstquark.c
libgsteffectv_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
+ $(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
-I$(top_srcdir)/gst/videofilter
libgsteffectv_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) \
+ $(GST_BASE_LIBS) \
$(GST_LIBS) \
$(top_builddir)/gst/videofilter/libgstvideofilter-@GST_MAJORMINOR@.la
libgsteffectv_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
diff --git a/gst/effectv/gstaging.c b/gst/effectv/gstaging.c
index 97bef900..3d70fbb3 100644
--- a/gst/effectv/gstaging.c
+++ b/gst/effectv/gstaging.c
@@ -37,11 +37,13 @@
#include "config.h"
#endif
-#include <gst/gst.h>
#include <gstvideofilter.h>
+
#include <string.h>
#include <math.h>
+#include <gst/video/video.h>
+
#define GST_TYPE_AGINGTV \
(gst_agingtv_get_type())
#define GST_AGINGTV(obj) \
@@ -89,128 +91,69 @@ struct _GstAgingTVClass
GstVideofilterClass parent_class;
};
-/* GstAgingTV signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0
- /* FILL ME */
-};
+GType gst_agingtv_get_type (void);
-static void gst_agingtv_base_init (gpointer g_class);
-static void gst_agingtv_class_init (gpointer g_class, gpointer class_data);
-static void gst_agingtv_init (GTypeInstance * instance, gpointer g_class);
-static void gst_agingtv_setup (GstVideofilter * videofilter);
+static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
+ "Filter/Effect/Video",
+ "AgingTV adds age to video input using scratches and dust",
+ "Sam Lantinga <slouken@devolution.com>");
-static void gst_agingtv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_agingtv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
+static GstStaticPadTemplate gst_agingtv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
-GType
-gst_agingtv_get_type (void)
-{
- static GType agingtv_type = 0;
+static GstStaticPadTemplate gst_agingtv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
- if (!agingtv_type) {
- static const GTypeInfo agingtv_info = {
- sizeof (GstAgingTVClass),
- gst_agingtv_base_init,
- NULL,
- gst_agingtv_class_init,
- NULL,
- NULL,
- sizeof (GstAgingTV),
- 0,
- gst_agingtv_init,
- };
+static GstVideofilterClass *parent_class = NULL;
- agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
- "GstAgingTV", &agingtv_info, 0);
- }
- return agingtv_type;
-}
-
-static GstVideofilterFormat gst_agingtv_formats[] = {
- {"RGB ", 32, gst_agingtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000}
-};
-
-static void
-gst_agingtv_base_init (gpointer g_class)
+static gboolean
+gst_agingtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GstElementDetails agingtv_details = GST_ELEMENT_DETAILS ("AgingTV",
- "Filter/Effect/Video",
- "AgingTV adds age to video input using scratches and dust",
- "Sam Lantinga <slouken@devolution.com>");
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+ GstAgingTV *filter = GST_AGINGTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- gst_element_class_set_details (element_class, &agingtv_details);
+ structure = gst_caps_get_structure (incaps, 0);
- for (i = 0; i < G_N_ELEMENTS (gst_agingtv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_agingtv_formats + i);
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+ return ret;
}
-static void
-gst_agingtv_class_init (gpointer g_class, gpointer class_data)
-{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
-
- gobject_class->set_property = gst_agingtv_set_property;
- gobject_class->get_property = gst_agingtv_get_property;
-
-#if 0
- g_object_class_install_property (gobject_class, ARG_METHOD,
- g_param_spec_enum ("method", "method", "method",
- GST_TYPE_AGINGTV_METHOD, GST_AGINGTV_METHOD_1, G_PARAM_READWRITE));
-#endif
-
- videofilter_class->setup = gst_agingtv_setup;
-}
-
-static void
-gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
+static gboolean
+gst_agingtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GstAgingTV *agingtv = GST_AGINGTV (instance);
- GstVideofilter *videofilter;
-
- GST_DEBUG ("gst_agingtv_init");
-
- videofilter = GST_VIDEOFILTER (agingtv);
-
- /* do stuff */
-}
+ GstAgingTV *filter;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
-static void
-gst_agingtv_setup (GstVideofilter * videofilter)
-{
- GstAgingTV *agingtv;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
+ filter = GST_AGINGTV (btrans);
- g_return_if_fail (GST_IS_AGINGTV (videofilter));
- agingtv = GST_AGINGTV (videofilter);
+ structure = gst_caps_get_structure (caps, 0);
- /* if any setup needs to be done, do it here */
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- agingtv->width = width;
- agingtv->height = height;
+ return ret;
}
static unsigned int
@@ -359,69 +302,89 @@ pits (guint32 * dest, gint width, gint height, gint area_scale,
}
}
-static void
-gst_agingtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_agingtv_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
{
- GstAgingTV *agingtv;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
+ GstAgingTV *agingtv = GST_AGINGTV (trans);
+ gint width = agingtv->width;
+ gint height = agingtv->height;
int video_size = width * height;
- guint32 *src = s;
- guint32 *dest = d;
+ guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
+ guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint area_scale = width * height / 64 / 480;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ gst_buffer_stamp (out, in);
if (area_scale <= 0)
area_scale = 1;
- g_return_if_fail (GST_IS_AGINGTV (videofilter));
- agingtv = GST_AGINGTV (videofilter);
-
coloraging (src, dest, video_size);
scratching (agingtv->scratches, agingtv->scratch_lines, dest, width, height);
pits (dest, width, height, area_scale, agingtv->pits_interval);
if (area_scale > 1)
dusts (dest, width, height, agingtv->dust_interval, area_scale);
+ return ret;
}
static void
-gst_agingtv_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
+gst_agingtv_base_init (gpointer g_class)
{
- GstAgingTV *src;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- g_return_if_fail (GST_IS_AGINGTV (object));
- src = GST_AGINGTV (object);
+ gst_element_class_set_details (element_class, &agingtv_details);
- GST_DEBUG ("gst_agingtv_set_property");
- switch (prop_id) {
-#if 0
- case ARG_METHOD:
- src->method = g_value_get_enum (value);
- break;
-#endif
- default:
- break;
- }
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_agingtv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_agingtv_src_template));
}
static void
-gst_agingtv_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
+gst_agingtv_class_init (gpointer klass, gpointer class_data)
{
- GstAgingTV *src;
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
- g_return_if_fail (GST_IS_AGINGTV (object));
- src = GST_AGINGTV (object);
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
- switch (prop_id) {
-#if 0
- case ARG_METHOD:
- g_value_set_enum (value, src->method);
- break;
-#endif
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
+ parent_class = g_type_class_peek_parent (klass);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_agingtv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_agingtv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_agingtv_transform);
+}
+
+static void
+gst_agingtv_init (GTypeInstance * instance, gpointer g_class)
+{
+}
+
+GType
+gst_agingtv_get_type (void)
+{
+ static GType agingtv_type = 0;
+
+ if (!agingtv_type) {
+ static const GTypeInfo agingtv_info = {
+ sizeof (GstAgingTVClass),
+ gst_agingtv_base_init,
+ NULL,
+ gst_agingtv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstAgingTV),
+ 0,
+ gst_agingtv_init,
+ };
+
+ agingtv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+ "GstAgingTV", &agingtv_info, 0);
}
+ return agingtv_type;
}
diff --git a/gst/effectv/gstdice.c b/gst/effectv/gstdice.c
index e3506e73..3d2376c5 100644
--- a/gst/effectv/gstdice.c
+++ b/gst/effectv/gstdice.c
@@ -13,9 +13,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
+
+#include <gstvideofilter.h>
+
#include <string.h>
#include <gst/gst.h>
-#include <gstvideofilter.h>
+
+#include <gst/video/video.h>
#define GST_TYPE_DICETV \
(gst_dicetv_get_type())
@@ -60,164 +64,85 @@ struct _GstDiceTV
struct _GstDiceTVClass
{
GstVideofilterClass parent_class;
-
- void (*reset) (GstElement * element);
-};
-
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- RESET_SIGNAL,
- LAST_SIGNAL
};
-enum
-{
- ARG_0,
- ARG_CUBE_BITS
-};
+GType gst_dicetv_get_type (void);
-static void gst_dicetv_base_init (gpointer g_class);
-static void gst_dicetv_class_init (gpointer g_class, gpointer class_data);
-static void gst_dicetv_init (GTypeInstance * instance, gpointer g_class);
-
-static void gst_dicetv_reset_handler (GstElement * elem);
static void gst_dicetv_create_map (GstDiceTV * filter);
-static void gst_dicetv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_dicetv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_dicetv_setup (GstVideofilter * videofilter);
-static void gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s);
-
-static guint gst_dicetv_signals[LAST_SIGNAL] = { 0 };
+static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
+ "Filter/Effect/Video",
+ "'Dices' the screen up into many small squares",
+ "Wim Taymans <wim.taymans@chello.be>");
+
+static GstStaticPadTemplate gst_dicetv_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
+ GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ );
+
+static GstStaticPadTemplate gst_dicetv_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
+ GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ );
+
+static GstVideofilterClass *parent_class = NULL;
-GType
-gst_dicetv_get_type (void)
+enum
{
- static GType dicetv_type = 0;
-
- if (!dicetv_type) {
- static const GTypeInfo dicetv_info = {
- sizeof (GstDiceTVClass),
- gst_dicetv_base_init,
- NULL,
- (GClassInitFunc) gst_dicetv_class_init,
- NULL,
- NULL,
- sizeof (GstDiceTV),
- 0,
- (GInstanceInitFunc) gst_dicetv_init,
- };
-
- dicetv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info,
- 0);
- }
- return dicetv_type;
-}
-
-static GstVideofilterFormat gst_dicetv_formats[] = {
- {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00,
- 0x000000ff},
- {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
- 0x0000ff00},
- {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
- 0x00ff0000},
- {"RGB ", 32, gst_dicetv_draw, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000},
+ ARG_0,
+ ARG_CUBE_BITS
};
-static void
-gst_dicetv_base_init (gpointer g_class)
+static gboolean
+gst_dicetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- /* elementfactory information */
- static GstElementDetails gst_dicetv_details = GST_ELEMENT_DETAILS ("DiceTV",
- "Filter/Effect/Video",
- "'Dices' the screen up into many small squares",
- "Wim Taymans <wim.taymans@chello.be>");
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
-
- gst_element_class_set_details (element_class, &gst_dicetv_details);
-
- for (i = 0; i < G_N_ELEMENTS (gst_dicetv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_dicetv_formats + i);
+ GstDiceTV *filter = GST_DICETV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
+
+ structure = gst_caps_get_structure (incaps, 0);
+
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ g_free (filter->dicemap);
+ filter->dicemap =
+ (gchar *) g_malloc (filter->height * filter->width * sizeof (char));
+ gst_dicetv_create_map (filter);
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
-}
-
-static void
-gst_dicetv_class_init (gpointer g_class, gpointer class_data)
-{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
- GstDiceTVClass *dicetv_class;
-
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- dicetv_class = GST_DICETV_CLASS (g_class);
-
- gst_dicetv_signals[RESET_SIGNAL] =
- g_signal_new ("reset",
- G_TYPE_FROM_CLASS (g_class),
- G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET (GstDiceTVClass, reset),
- NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
-
- dicetv_class->reset = gst_dicetv_reset_handler;
-
- gobject_class->set_property = gst_dicetv_set_property;
- gobject_class->get_property = gst_dicetv_get_property;
-
- g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
- g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
- MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
-
- videofilter_class->setup = gst_dicetv_setup;
+ return ret;
}
-static void
-gst_dicetv_setup (GstVideofilter * videofilter)
+static gboolean
+gst_dicetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GstDiceTV *dicetv;
-
- g_return_if_fail (GST_IS_DICETV (videofilter));
- dicetv = GST_DICETV (videofilter);
-
- dicetv->width = gst_videofilter_get_input_width (videofilter);
- dicetv->height = gst_videofilter_get_input_height (videofilter);
+ GstDiceTV *filter;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- g_free (dicetv->dicemap);
- dicetv->dicemap =
- (gchar *) g_malloc (dicetv->height * dicetv->width * sizeof (char));
- gst_dicetv_create_map (dicetv);
-}
+ filter = GST_DICETV (btrans);
-static void
-gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
-{
- GstDiceTV *filter = GST_DICETV (instance);
+ structure = gst_caps_get_structure (caps, 0);
- filter->dicemap = NULL;
- filter->g_cube_bits = DEFAULT_CUBE_BITS;
- filter->g_cube_size = 0;
- filter->g_map_height = 0;
- filter->g_map_width = 0;
-}
-
-static void
-gst_dicetv_reset_handler (GstElement * element)
-{
- GstDiceTV *filter = GST_DICETV (element);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- gst_dicetv_create_map (filter);
+ return ret;
}
static unsigned int
@@ -228,23 +153,20 @@ fastrand (void)
return (fastrand_val = fastrand_val * 1103515245 + 12345);
}
-static void
-gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_dicetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstDiceTV *filter;
- guint32 *src;
- guint32 *dest;
- gint i;
- gint map_x, map_y, map_i;
- gint base;
- gint dx, dy, di;
- gint video_width;
- gint g_cube_bits;
- gint g_cube_size;
+ guint32 *src, *dest;
+ gint i, map_x, map_y, map_i, base, dx, dy, di;
+ gint video_width, g_cube_bits, g_cube_size;
+ GstFlowReturn ret = GST_FLOW_OK;
- filter = GST_DICETV (videofilter);
- src = (guint32 *) s;
- dest = (guint32 *) d;
+ filter = GST_DICETV (trans);
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
+
+ gst_buffer_stamp (out, in);
video_width = filter->width;
g_cube_bits = filter->g_cube_bits;
@@ -304,6 +226,8 @@ gst_dicetv_draw (GstVideofilter * videofilter, void *d, void *s)
map_i++;
}
}
+
+ return ret;
}
static void
@@ -364,3 +288,78 @@ gst_dicetv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
+
+static void
+gst_dicetv_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &gst_dicetv_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_dicetv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_dicetv_src_template));
+}
+
+static void
+gst_dicetv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = gst_dicetv_set_property;
+ gobject_class->get_property = gst_dicetv_get_property;
+
+ g_object_class_install_property (gobject_class, ARG_CUBE_BITS,
+ g_param_spec_int ("square_bits", "Square Bits", "The size of the Squares",
+ MIN_CUBE_BITS, MAX_CUBE_BITS, DEFAULT_CUBE_BITS, G_PARAM_READWRITE));
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_dicetv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_dicetv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_dicetv_transform);
+}
+
+static void
+gst_dicetv_init (GTypeInstance * instance, gpointer g_class)
+{
+ GstDiceTV *filter = GST_DICETV (instance);
+
+ filter->dicemap = NULL;
+ filter->g_cube_bits = DEFAULT_CUBE_BITS;
+ filter->g_cube_size = 0;
+ filter->g_map_height = 0;
+ filter->g_map_width = 0;
+}
+
+GType
+gst_dicetv_get_type (void)
+{
+ static GType dicetv_type = 0;
+
+ if (!dicetv_type) {
+ static const GTypeInfo dicetv_info = {
+ sizeof (GstDiceTVClass),
+ gst_dicetv_base_init,
+ NULL,
+ (GClassInitFunc) gst_dicetv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstDiceTV),
+ 0,
+ (GInstanceInitFunc) gst_dicetv_init,
+ };
+
+ dicetv_type =
+ g_type_register_static (GST_TYPE_VIDEOFILTER, "GstDiceTV", &dicetv_info,
+ 0);
+ }
+ return dicetv_type;
+}
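
fastrand(), kept as-is in the port above, is the classic ANSI-C rand() linear congruential generator (multiplier 1103515245, increment 12345) with its state exposed directly. Because the modulus is 2^32, its low-order bits have very short periods (the lowest bit simply alternates), which is why callers that need a small random value take the high bits instead, e.g. fastrand () >> 24 in gstquark.c below. A self-contained sketch of that behaviour; the seed of 1 is arbitrary:

    #include <stdio.h>

    static unsigned int fastrand_val = 1;   /* arbitrary seed */

    static unsigned int
    fastrand (void)
    {
      /* same constants as ANSI C rand(): x = x * 1103515245 + 12345 (mod 2^32) */
      return (fastrand_val = fastrand_val * 1103515245 + 12345);
    }

    int
    main (void)
    {
      int i;

      for (i = 0; i < 4; i++) {
        unsigned int r = fastrand ();

        /* the lowest bit flips on every call; the top byte looks far more random */
        printf ("raw %10u  low bit %u  top byte %3u\n", r, r & 1, r >> 24);
      }
      return 0;
    }
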
diff --git a/gst/effectv/gstedge.c b/gst/effectv/gstedge.c
index 257b1046..2ee53dbe 100644
--- a/gst/effectv/gstedge.c
+++ b/gst/effectv/gstedge.c
@@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
-#include <string.h>
-#include <gst/gst.h>
+
#include <gstvideofilter.h>
+#include <string.h>
+
+#include <gst/video/video.h>
+
#define GST_TYPE_EDGETV \
(gst_edgetv_get_type())
#define GST_EDGETV(obj) \
@@ -57,146 +60,97 @@ struct _GstEdgeTVClass
GstVideofilterClass parent_class;
};
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
+GType gst_edgetv_get_type (void);
-enum
-{
- ARG_0
-};
-
-static void gst_edgetv_base_init (gpointer g_class);
-static void gst_edgetv_class_init (gpointer g_class, gpointer class_data);
-static void gst_edgetv_init (GTypeInstance * instance, gpointer g_class);
+static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
+ "Filter/Effect/Video",
+ "Apply edge detect on video",
+ "Wim Taymans <wim.taymans@chello.be>");
-static void gst_edgetv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_edgetv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
+static GstStaticPadTemplate gst_edgetv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
-static void gst_edgetv_setup (GstVideofilter * videofilter);
-static void gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
+static GstStaticPadTemplate gst_edgetv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
-/*static guint gst_edgetv_signals[LAST_SIGNAL] = { 0 }; */
+static GstVideofilterClass *parent_class = NULL;
-GType
-gst_edgetv_get_type (void)
+static gboolean
+gst_edgetv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType edgetv_type = 0;
-
- if (!edgetv_type) {
- static const GTypeInfo edgetv_info = {
- sizeof (GstEdgeTVClass),
- gst_edgetv_base_init,
- NULL,
- (GClassInitFunc) gst_edgetv_class_init,
- NULL,
- NULL,
- sizeof (GstEdgeTV),
- 0,
- (GInstanceInitFunc) gst_edgetv_init,
- };
-
- edgetv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
- 0);
+ GstEdgeTV *edgetv = GST_EDGETV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
+
+ structure = gst_caps_get_structure (incaps, 0);
+
+ if (gst_structure_get_int (structure, "width", &edgetv->width) &&
+ gst_structure_get_int (structure, "height", &edgetv->height)) {
+ edgetv->map_width = edgetv->width / 4;
+ edgetv->map_height = edgetv->height / 4;
+ edgetv->video_width_margin = edgetv->width % 4;
+
+ g_free (edgetv->map);
+ edgetv->map =
+ (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
+ sizeof (guint32) * 2);
+ memset (edgetv->map, 0,
+ edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
+ ret = TRUE;
}
- return edgetv_type;
-}
-static GstVideofilterFormat gst_edgetv_formats[] = {
- {"RGB ", 32, gst_edgetv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000}
-};
-
-static void
-gst_edgetv_base_init (gpointer g_class)
-{
- /* elementfactory information */
- static GstElementDetails gst_edgetv_details = GST_ELEMENT_DETAILS ("EdgeTV",
- "Filter/Effect/Video",
- "Apply edge detect on video",
- "Wim Taymans <wim.taymans@chello.be>");
-
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
-
- gst_element_class_set_details (element_class, &gst_edgetv_details);
-
- for (i = 0; i < G_N_ELEMENTS (gst_edgetv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_edgetv_formats + i);
- }
-
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+ return ret;
}
-static void
-gst_edgetv_class_init (gpointer g_class, gpointer class_data)
+static gboolean
+gst_edgetv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
+ GstEdgeTV *filter;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- gobject_class->set_property = gst_edgetv_set_property;
- gobject_class->get_property = gst_edgetv_get_property;
+ filter = GST_EDGETV (btrans);
- videofilter_class->setup = gst_edgetv_setup;
-}
+ structure = gst_caps_get_structure (caps, 0);
-static void
-gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
-{
- GstEdgeTV *edgetv = GST_EDGETV (instance);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- edgetv->map = NULL;
+ return ret;
}
-static void
-gst_edgetv_setup (GstVideofilter * videofilter)
-{
- GstEdgeTV *edgetv;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
-
- g_return_if_fail (GST_IS_EDGETV (videofilter));
- edgetv = GST_EDGETV (videofilter);
-
- edgetv->width = width;
- edgetv->height = height;
- edgetv->map_width = width / 4;
- edgetv->map_height = height / 4;
- edgetv->video_width_margin = width % 4;
-
- g_free (edgetv->map);
- edgetv->map =
- (guint32 *) g_malloc (edgetv->map_width * edgetv->map_height *
- sizeof (guint32) * 2);
- memset (edgetv->map, 0,
- edgetv->map_width * edgetv->map_height * sizeof (guint32) * 2);
-}
-
-static void
-gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_edgetv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstEdgeTV *filter;
- int x, y;
- int r, g, b;
+ gint x, y, r, g, b;
guint32 *src, *dest;
guint32 p, q;
guint32 v0, v1, v2, v3;
+ GstFlowReturn ret = GST_FLOW_OK;
- filter = GST_EDGETV (videofilter);
+ filter = GST_EDGETV (trans);
- src = (guint32 *) s;
- dest = (guint32 *) d;
+ gst_buffer_stamp (out, in);
+
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
src += filter->width * 4 + 4;
dest += filter->width * 4 + 4;
@@ -207,7 +161,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = *src;
q = *(src - 4);
-/* difference between the current pixel and right neighbor. */
+ /* difference between the current pixel and right neighbor. */
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
b = (p & 0xff) - (q & 0xff);
@@ -225,7 +179,7 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
b = 255;
v2 = (r << 17) | (g << 9) | b;
-/* difference between the current pixel and upper neighbor. */
+ /* difference between the current pixel and upper neighbor. */
q = *(src - filter->width * 4);
r = ((p & 0xff0000) - (q & 0xff0000)) >> 16;
g = ((p & 0xff00) - (q & 0xff00)) >> 8;
@@ -275,37 +229,70 @@ gst_edgetv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
src += filter->width * 3 + 8 + filter->video_width_margin;
dest += filter->width * 3 + 8 + filter->video_width_margin;
}
+
+ return ret;
}
static void
-gst_edgetv_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
+gst_edgetv_base_init (gpointer g_class)
{
- GstEdgeTV *filter;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- g_return_if_fail (GST_IS_EDGETV (object));
+ gst_element_class_set_details (element_class, &gst_edgetv_details);
- filter = GST_EDGETV (object);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_edgetv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_edgetv_src_template));
+}
- switch (prop_id) {
- default:
- break;
- }
+static void
+gst_edgetv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_edgetv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_edgetv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_edgetv_transform);
}
static void
-gst_edgetv_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
+gst_edgetv_init (GTypeInstance * instance, gpointer g_class)
{
- GstEdgeTV *filter;
+ GstEdgeTV *edgetv = GST_EDGETV (instance);
- g_return_if_fail (GST_IS_EDGETV (object));
+ edgetv->map = NULL;
+}
- filter = GST_EDGETV (object);
+GType
+gst_edgetv_get_type (void)
+{
+ static GType edgetv_type = 0;
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
+ if (!edgetv_type) {
+ static const GTypeInfo edgetv_info = {
+ sizeof (GstEdgeTVClass),
+ gst_edgetv_base_init,
+ NULL,
+ (GClassInitFunc) gst_edgetv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstEdgeTV),
+ 0,
+ (GInstanceInitFunc) gst_edgetv_init,
+ };
+
+ edgetv_type =
+ g_type_register_static (GST_TYPE_VIDEOFILTER, "GstEdgeTV", &edgetv_info,
+ 0);
}
+ return edgetv_type;
}
diff --git a/gst/effectv/gsteffectv.c b/gst/effectv/gsteffectv.c
index 5a051b63..bd8b6a04 100644
--- a/gst/effectv/gsteffectv.c
+++ b/gst/effectv/gsteffectv.c
@@ -25,12 +25,8 @@
#include "config.h"
#endif
-#include <string.h>
-#include <gst/gst.h>
-#include <gst/video/video.h>
#include "gsteffectv.h"
-
struct _elements_entry
{
gchar *name;
@@ -38,31 +34,17 @@ struct _elements_entry
};
static struct _elements_entry _elements[] = {
- {"edgeTV", gst_edgetv_get_type},
- {"agingTV", gst_agingtv_get_type},
- {"diceTV", gst_dicetv_get_type},
- {"warpTV", gst_warptv_get_type},
- {"shagadelicTV", gst_shagadelictv_get_type},
- {"vertigoTV", gst_vertigotv_get_type},
- {"revTV", gst_revtv_get_type},
- {"quarkTV", gst_quarktv_get_type},
+ {"edgetv", gst_edgetv_get_type},
+ {"agingtv", gst_agingtv_get_type},
+ {"dicetv", gst_dicetv_get_type},
+ {"warptv", gst_warptv_get_type},
+ {"shagadelictv", gst_shagadelictv_get_type},
+ {"vertigotv", gst_vertigotv_get_type},
+ {"revtv", gst_revtv_get_type},
+ {"quarktv", gst_quarktv_get_type},
{NULL, 0},
};
-
-GstStaticPadTemplate gst_effectv_src_template = GST_STATIC_PAD_TEMPLATE ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
- );
-
-GstStaticPadTemplate gst_effectv_sink_template =
- GST_STATIC_PAD_TEMPLATE ("sink",
- GST_PAD_SINK,
- GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
- );
-
static gboolean
plugin_init (GstPlugin * plugin)
{
diff --git a/gst/effectv/gsteffectv.h b/gst/effectv/gsteffectv.h
index 4cf7f3e3..ce032335 100644
--- a/gst/effectv/gsteffectv.h
+++ b/gst/effectv/gsteffectv.h
@@ -31,6 +31,3 @@ GType gst_shagadelictv_get_type (void);
GType gst_vertigotv_get_type (void);
GType gst_revtv_get_type (void);
GType gst_quarktv_get_type (void);
-
-extern GstStaticPadTemplate gst_effectv_sink_template;
-extern GstStaticPadTemplate gst_effectv_src_template;
diff --git a/gst/effectv/gstquark.c b/gst/effectv/gstquark.c
index 48641e6e..d0cc1971 100644
--- a/gst/effectv/gstquark.c
+++ b/gst/effectv/gstquark.c
@@ -24,10 +24,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
+
+#include <gstvideofilter.h>
+
#include <math.h>
#include <string.h>
-#include <gst/gst.h>
-#include "gsteffectv.h"
+
+#include <gst/video/video.h>
#define GST_TYPE_QUARKTV \
(gst_quarktv_get_type())
@@ -40,7 +43,7 @@
#define GST_IS_QUARKTV_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_QUARKTV))
-/* number of frames of time-buffer. It should be as a configurable paramter */
+/* number of frames of time-buffer. It should be as a configurable paramater */
/* This number also must be 2^n just for the speed. */
#define PLANES 16
@@ -49,9 +52,7 @@ typedef struct _GstQuarkTVClass GstQuarkTVClass;
struct _GstQuarkTV
{
- GstElement element;
-
- GstPad *sinkpad, *srcpad;
+ GstVideofilter element;
gint width, height;
gint area;
@@ -62,20 +63,7 @@ struct _GstQuarkTV
struct _GstQuarkTVClass
{
- GstElementClass parent_class;
-};
-
-/* elementfactory information */
-static GstElementDetails gst_quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
- "Filter/Effect/Video",
- "Motion dissolver",
- "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
-
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
+ GstVideofilterClass parent_class;
};
enum
@@ -84,179 +72,103 @@ enum
ARG_PLANES
};
-static void gst_quarktv_base_init (gpointer g_class);
-static void gst_quarktv_class_init (GstQuarkTVClass * klass);
-static void gst_quarktv_init (GstQuarkTV * filter);
-
-static GstStateChangeReturn gst_quarktv_change_state (GstElement * element,
- GstStateChange transition);
-
-static void gst_quarktv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_quarktv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static GstFlowReturn gst_quarktv_chain (GstPad * pad, GstBuffer * buffer);
-
-static GstElementClass *parent_class = NULL;
-
-/* static guint gst_quarktv_signals[LAST_SIGNAL] = { 0 }; */
-
-static inline guint32
-fastrand (void)
-{
- static unsigned int fastrand_val;
+GType gst_quarktv_get_type (void);
- return (fastrand_val = fastrand_val * 1103515245 + 12345);
-}
+static GstElementDetails quarktv_details = GST_ELEMENT_DETAILS ("QuarkTV",
+ "Filter/Effect/Video",
+ "Motion dissolver",
+ "FUKUCHI, Kentarou <fukuchi@users.sourceforge.net>");
-GType
-gst_quarktv_get_type (void)
+static GstStaticPadTemplate gst_quarktv_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
+ );
+
+static GstStaticPadTemplate gst_quarktv_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx "; " GST_VIDEO_CAPS_RGBx)
+ );
+
+static GstVideofilterClass *parent_class = NULL;
+
+static gboolean
+gst_quarktv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType quarktv_type = 0;
+ GstQuarkTV *filter = GST_QUARKTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- if (!quarktv_type) {
- static const GTypeInfo quarktv_info = {
- sizeof (GstQuarkTVClass),
- gst_quarktv_base_init,
- NULL,
- (GClassInitFunc) gst_quarktv_class_init,
- NULL,
- NULL,
- sizeof (GstQuarkTV),
- 0,
- (GInstanceInitFunc) gst_quarktv_init,
- };
+ structure = gst_caps_get_structure (incaps, 0);
- quarktv_type =
- g_type_register_static (GST_TYPE_ELEMENT, "GstQuarkTV", &quarktv_info,
- 0);
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ filter->area = filter->width * filter->height;
+ ret = TRUE;
}
- return quarktv_type;
-}
-
-static void
-gst_quarktv_base_init (gpointer g_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_effectv_src_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&gst_effectv_sink_template));
-
- gst_element_class_set_details (element_class, &gst_quarktv_details);
-}
-
-static void
-gst_quarktv_class_init (GstQuarkTVClass * klass)
-{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
-
- gobject_class = (GObjectClass *) klass;
- gstelement_class = (GstElementClass *) klass;
-
- parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
-
- gobject_class->set_property = gst_quarktv_set_property;
- gobject_class->get_property = gst_quarktv_get_property;
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_PLANES,
- g_param_spec_int ("planes", "Planes", "Number of frames in the buffer",
- 1, 32, PLANES, G_PARAM_READWRITE));
-
- gstelement_class->change_state = gst_quarktv_change_state;
+ return ret;
}
-static GstPadLinkReturn
-gst_quarktv_link (GstPad * pad, GstPad * peer)
+static gboolean
+gst_quarktv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
GstQuarkTV *filter;
- GstPad *otherpad;
-
- //gint i;
- //GstStructure *structure;
- //GstPadLinkReturn res;
-
- filter = GST_QUARKTV (gst_pad_get_parent (pad));
- g_return_val_if_fail (GST_IS_QUARKTV (filter), GST_PAD_LINK_REFUSED);
-
- otherpad = (pad == filter->srcpad ? filter->sinkpad : filter->srcpad);
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
-#if 0
- res = gst_pad_try_set_caps (otherpad, caps);
- if (GST_PAD_LINK_FAILED (res))
- return res;
+ filter = GST_QUARKTV (btrans);
structure = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (structure, "width", &filter->width) ||
- !gst_structure_get_int (structure, "height", &filter->height))
- return GST_PAD_LINK_REFUSED;
-
- filter->area = filter->width * filter->height;
- for (i = 0; i < filter->planes; i++) {
- if (filter->planetable[i])
- gst_buffer_unref (filter->planetable[i]);
- filter->planetable[i] = NULL;
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
}
-#endif
- return GST_PAD_LINK_OK;
+ return ret;
}
-static void
-gst_quarktv_init (GstQuarkTV * filter)
+static inline guint32
+fastrand (void)
{
- filter->sinkpad =
- gst_pad_new_from_template (gst_static_pad_template_get
- (&gst_effectv_sink_template), "sink");
- //gst_pad_set_getcaps_function (filter->sinkpad, gst_pad_proxy_getcaps);
- gst_pad_set_chain_function (filter->sinkpad, gst_quarktv_chain);
- gst_pad_set_link_function (filter->sinkpad, gst_quarktv_link);
- gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad);
-
- filter->srcpad =
- gst_pad_new_from_template (gst_static_pad_template_get
- (&gst_effectv_src_template), "src");
- //gst_pad_set_getcaps_function (filter->srcpad, gst_pad_proxy_getcaps);
- gst_pad_set_link_function (filter->srcpad, gst_quarktv_link);
- gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad);
+ static unsigned int fastrand_val;
- filter->planes = PLANES;
- filter->current_plane = filter->planes - 1;
+ return (fastrand_val = fastrand_val * 1103515245 + 12345);
}
static GstFlowReturn
-gst_quarktv_chain (GstPad * pad, GstBuffer * buf)
+gst_quarktv_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
{
GstQuarkTV *filter;
- guint32 *src, *dest;
- GstBuffer *outbuf;
gint area;
- GstFlowReturn ret;
+ guint32 *src, *dest;
+ GstFlowReturn ret = GST_FLOW_OK;
- filter = GST_QUARKTV (gst_pad_get_parent (pad));
+ filter = GST_QUARKTV (trans);
- src = (guint32 *) GST_BUFFER_DATA (buf);
+ gst_buffer_stamp (out, in);
area = filter->area;
-
- ret =
- gst_pad_alloc_buffer (filter->srcpad, 0, area, GST_PAD_CAPS (pad),
- &outbuf);
- if (ret != GST_FLOW_OK)
- goto no_buffer;
-
- dest = (guint32 *) GST_BUFFER_DATA (outbuf);
- GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
if (filter->planetable[filter->current_plane])
gst_buffer_unref (filter->planetable[filter->current_plane]);
- filter->planetable[filter->current_plane] = buf;
+ filter->planetable[filter->current_plane] = gst_buffer_ref (in);
+ /* For each pixel */
while (--area) {
GstBuffer *rand;
@@ -265,27 +177,37 @@ gst_quarktv_chain (GstPad * pad, GstBuffer * buf)
filter->planetable[(filter->current_plane +
(fastrand () >> 24)) & (filter->planes - 1)];
+ /* Copy the pixel from the random buffer to dest */
dest[area] = (rand ? ((guint32 *) GST_BUFFER_DATA (rand))[area] : 0);
}
- ret = gst_pad_push (filter->srcpad, outbuf);
-
filter->current_plane--;
if (filter->current_plane < 0)
filter->current_plane = filter->planes - 1;
return ret;
-
-no_buffer:
- {
- return ret;
- }
}
static GstStateChangeReturn
gst_quarktv_change_state (GstElement * element, GstStateChange transition)
{
GstQuarkTV *filter = GST_QUARKTV (element);
+ GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ {
+ filter->planetable =
+ (GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
+ memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (GST_ELEMENT_CLASS (parent_class)->change_state)
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
@@ -301,18 +223,11 @@ gst_quarktv_change_state (GstElement * element, GstStateChange transition)
filter->planetable = NULL;
break;
}
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- {
- filter->planetable =
- (GstBuffer **) g_malloc (filter->planes * sizeof (GstBuffer *));
- memset (filter->planetable, 0, filter->planes * sizeof (GstBuffer *));
- break;
- }
default:
break;
}
- return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ return ret;
}
@@ -377,3 +292,72 @@ gst_quarktv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
+
+static void
+gst_quarktv_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &quarktv_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_quarktv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_quarktv_src_template));
+}
+
+static void
+gst_quarktv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = GST_DEBUG_FUNCPTR (gst_quarktv_set_property);
+ gobject_class->get_property = GST_DEBUG_FUNCPTR (gst_quarktv_get_property);
+
+ element_class->change_state = GST_DEBUG_FUNCPTR (gst_quarktv_change_state);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_quarktv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_quarktv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_quarktv_transform);
+}
+
+static void
+gst_quarktv_init (GTypeInstance * instance, gpointer g_class)
+{
+ GstQuarkTV *filter = GST_QUARKTV (instance);
+
+ filter->planes = PLANES;
+ filter->current_plane = filter->planes - 1;
+}
+
+GType
+gst_quarktv_get_type (void)
+{
+ static GType quarktv_type = 0;
+
+ if (!quarktv_type) {
+ static const GTypeInfo quarktv_info = {
+ sizeof (GstQuarkTVClass),
+ gst_quarktv_base_init,
+ NULL,
+ gst_quarktv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstQuarkTV),
+ 0,
+ gst_quarktv_init,
+ };
+
+ quarktv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+ "GstQuarkTV", &quarktv_info, 0);
+ }
+ return quarktv_type;
+}
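
The quarktv hunks above also rearrange gst_quarktv_change_state so that the plane table is allocated on the upward READY->PAUSED transition before chaining up to the parent class, and freed on the downward PAUSED->READY transition after chaining up. A minimal sketch of that pattern (not part of the patch; MyElement, self->scratch and self->scratch_size are hypothetical names used only for illustration):

  static GstStateChangeReturn
  my_element_change_state (GstElement * element, GstStateChange transition)
  {
    MyElement *self = MY_ELEMENT (element);      /* hypothetical element type */
    GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;

    switch (transition) {
      case GST_STATE_CHANGE_READY_TO_PAUSED:
        /* upward change: allocate resources before the parent handles it */
        self->scratch = g_new0 (guint32, self->scratch_size);
        break;
      default:
        break;
    }

    if (GST_ELEMENT_CLASS (parent_class)->change_state)
      ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

    switch (transition) {
      case GST_STATE_CHANGE_PAUSED_TO_READY:
        /* downward change: release resources after the parent has handled it */
        g_free (self->scratch);
        self->scratch = NULL;
        break;
      default:
        break;
    }

    return ret;
  }
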
diff --git a/gst/effectv/gstrev.c b/gst/effectv/gstrev.c
index 429a9b1c..331d6b37 100644
--- a/gst/effectv/gstrev.c
+++ b/gst/effectv/gstrev.c
@@ -43,10 +43,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
+
+#include <gstvideofilter.h>
+
#include <math.h>
#include <string.h>
-#include <gst/gst.h>
-#include <gstvideofilter.h>
+
+#include <gst/video/video.h>
#define GST_TYPE_REVTV \
(gst_revtv_get_type())
@@ -78,15 +81,6 @@ struct _GstRevTV
struct _GstRevTVClass
{
GstVideofilterClass parent_class;
-
- void (*reset) (GstElement * element);
-};
-
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
};
enum
@@ -97,132 +91,87 @@ enum
ARG_GAIN
};
-static void gst_revtv_base_init (gpointer g_class);
-static void gst_revtv_class_init (gpointer g_class, gpointer class_data);
-static void gst_revtv_init (GTypeInstance * instance, gpointer g_class);
-
-static void gst_revtv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_revtv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_revtv_setup (GstVideofilter * videofilter);
-static void gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
+GType gst_revtv_get_type (void);
-/* static guint gst_revtv_signals[LAST_SIGNAL] = { 0 }; */
+static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
+ "Filter/Effect/Video",
+ "A video waveform monitor for each line of video processed",
+ "Wim Taymans <wim.taymans@chello.be>");
-GType
-gst_revtv_get_type (void)
-{
- static GType revtv_type = 0;
-
- if (!revtv_type) {
- static const GTypeInfo revtv_info = {
- sizeof (GstRevTVClass),
- gst_revtv_base_init,
- NULL,
- (GClassInitFunc) gst_revtv_class_init,
- NULL,
- NULL,
- sizeof (GstRevTV),
- 0,
- (GInstanceInitFunc) gst_revtv_init,
- };
+static GstStaticPadTemplate gst_revtv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
- revtv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info,
- 0);
- }
- return revtv_type;
-}
+static GstStaticPadTemplate gst_revtv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
-static GstVideofilterFormat gst_revtv_formats[] = {
- {"RGB ", 32, gst_revtv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000}
-};
+static GstVideofilterClass *parent_class = NULL;
-static void
-gst_revtv_base_init (gpointer g_class)
+static gboolean
+gst_revtv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- /* elementfactory information */
- static GstElementDetails gst_revtv_details = GST_ELEMENT_DETAILS ("RevTV",
- "Filter/Effect/Video",
- "A video waveform monitor for each line of video processed",
- "Wim Taymans <wim.taymans@chello.be>");
+ GstRevTV *filter = GST_REVTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+ structure = gst_caps_get_structure (incaps, 0);
- gst_element_class_set_details (element_class, &gst_revtv_details);
-
- for (i = 0; i < G_N_ELEMENTS (gst_revtv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class, gst_revtv_formats + i);
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
-}
-
-static void
-gst_revtv_class_init (gpointer klass, gpointer class_data)
-{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = G_OBJECT_CLASS (klass);
- videofilter_class = GST_VIDEOFILTER_CLASS (klass);
-
- gobject_class->set_property = gst_revtv_set_property;
- gobject_class->get_property = gst_revtv_get_property;
-
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY,
- g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
- 1, 100, 1, G_PARAM_READWRITE));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE,
- g_param_spec_int ("linespace", "Linespace", "Control line spacing",
- 1, 100, 6, G_PARAM_READWRITE));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN,
- g_param_spec_int ("gain", "Gain", "Control gain",
- 1, 200, 50, G_PARAM_READWRITE));
-
- videofilter_class->setup = gst_revtv_setup;
+ return ret;
}
-static void
-gst_revtv_init (GTypeInstance * instance, gpointer g_class)
+static gboolean
+gst_revtv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GstRevTV *restv = GST_REVTV (instance);
+ GstRevTV *filter;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- restv->vgrabtime = 1;
- restv->vgrab = 0;
- restv->linespace = 6;
- restv->vscale = 50;
-}
+ filter = GST_REVTV (btrans);
-static void
-gst_revtv_setup (GstVideofilter * videofilter)
-{
- GstRevTV *revtv;
+ structure = gst_caps_get_structure (caps, 0);
- g_return_if_fail (GST_IS_REVTV (videofilter));
- revtv = GST_REVTV (videofilter);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- revtv->width = gst_videofilter_get_input_width (videofilter);
- revtv->height = gst_videofilter_get_input_height (videofilter);
+ return ret;
}
-static void
-gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_revtv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
GstRevTV *filter;
guint32 *src, *dest;
gint width, height;
guint32 *nsrc;
gint y, x, R, G, B, yval;
+ GstFlowReturn ret = GST_FLOW_OK;
- filter = GST_REVTV (videofilter);
+ filter = GST_REVTV (trans);
- src = (guint32 *) s;
- dest = (guint32 *) d;
+ gst_buffer_stamp (out, in);
+
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@@ -247,6 +196,8 @@ gst_revtv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
}
}
}
+
+ return ret;
}
static void
@@ -299,3 +250,83 @@ gst_revtv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
+
+static void
+gst_revtv_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &gst_revtv_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_revtv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_revtv_src_template));
+}
+
+static void
+gst_revtv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = gst_revtv_set_property;
+ gobject_class->get_property = gst_revtv_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DELAY,
+ g_param_spec_int ("delay", "Delay", "Delay in frames between updates",
+ 1, 100, 1, G_PARAM_READWRITE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_LINESPACE,
+ g_param_spec_int ("linespace", "Linespace", "Control line spacing",
+ 1, 100, 6, G_PARAM_READWRITE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GAIN,
+ g_param_spec_int ("gain", "Gain", "Control gain",
+ 1, 200, 50, G_PARAM_READWRITE));
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_revtv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_revtv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_revtv_transform);
+}
+
+static void
+gst_revtv_init (GTypeInstance * instance, gpointer g_class)
+{
+ GstRevTV *restv = GST_REVTV (instance);
+
+ restv->vgrabtime = 1;
+ restv->vgrab = 0;
+ restv->linespace = 6;
+ restv->vscale = 50;
+}
+
+GType
+gst_revtv_get_type (void)
+{
+ static GType revtv_type = 0;
+
+ if (!revtv_type) {
+ static const GTypeInfo revtv_info = {
+ sizeof (GstRevTVClass),
+ gst_revtv_base_init,
+ NULL,
+ (GClassInitFunc) gst_revtv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstRevTV),
+ 0,
+ (GInstanceInitFunc) gst_revtv_init,
+ };
+
+ revtv_type =
+ g_type_register_static (GST_TYPE_VIDEOFILTER, "GstRevTV", &revtv_info,
+ 0);
+ }
+ return revtv_type;
+}
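
The ported transform functions call gst_buffer_stamp (out, in) before writing any pixels (see the revtv hunk above). For readers unfamiliar with this 0.10 helper, it copies the timing metadata of the input buffer onto the output buffer; roughly the equivalent of the following sketch (not part of the patch):

  GST_BUFFER_TIMESTAMP (out) = GST_BUFFER_TIMESTAMP (in);
  GST_BUFFER_DURATION (out) = GST_BUFFER_DURATION (in);
  GST_BUFFER_OFFSET (out) = GST_BUFFER_OFFSET (in);
  GST_BUFFER_OFFSET_END (out) = GST_BUFFER_OFFSET_END (in);
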
diff --git a/gst/effectv/gstshagadelic.c b/gst/effectv/gstshagadelic.c
index e3c50d23..cedf9c92 100644
--- a/gst/effectv/gstshagadelic.c
+++ b/gst/effectv/gstshagadelic.c
@@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
+
+#include <gstvideofilter.h>
+
#include <math.h>
#include <string.h>
-#include <gst/gst.h>
-#include <gstvideofilter.h>
+
+#include <gst/video/video.h>
#define GST_TYPE_SHAGADELICTV \
(gst_shagadelictv_get_type())
@@ -64,135 +67,81 @@ struct _GstShagadelicTVClass
GstVideofilterClass parent_class;
};
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0
-};
-
-static void gst_shagadelictv_base_init (gpointer g_class);
-static void gst_shagadelictv_class_init (gpointer g_class, gpointer class_data);
-static void gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class);
+GType gst_shagadelictv_get_type (void);
static void gst_shagadelic_initialize (GstShagadelicTV * filter);
-static void gst_shagadelictv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_shagadelictv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_shagadelictv_setup (GstVideofilter * videofilter);
-static void gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d,
- void *s);
-
-/*static guint gst_shagadelictv_signals[LAST_SIGNAL] = { 0 }; */
-
-GType
-gst_shagadelictv_get_type (void)
+static GstElementDetails shagadelictv_details =
+GST_ELEMENT_DETAILS ("ShagadelicTV",
+ "Filter/Effect/Video",
+ "Oh behave, ShagedelicTV makes images shagadelic!",
+ "Wim Taymans <wim.taymans@chello.be>");
+
+static GstStaticPadTemplate gst_shagadelictv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
+
+static GstStaticPadTemplate gst_shagadelictv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
+
+static GstVideofilterClass *parent_class = NULL;
+
+static gboolean
+gst_shagadelictv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType shagadelictv_type = 0;
-
- if (!shagadelictv_type) {
- static const GTypeInfo shagadelictv_info = {
- sizeof (GstShagadelicTVClass),
- gst_shagadelictv_base_init,
- NULL,
- (GClassInitFunc) gst_shagadelictv_class_init,
- NULL,
- NULL,
- sizeof (GstShagadelicTV),
- 0,
- (GInstanceInitFunc) gst_shagadelictv_init,
- };
+ GstShagadelicTV *filter = GST_SHAGADELICTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- shagadelictv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV",
- &shagadelictv_info, 0);
- }
- return shagadelictv_type;
-}
-
-static GstVideofilterFormat gst_shagadelictv_formats[] = {
- {"RGB ", 32, gst_shagadelictv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000}
-};
+ structure = gst_caps_get_structure (incaps, 0);
-static void
-gst_shagadelictv_base_init (gpointer g_class)
-{
- /* elementfactory information */
- static GstElementDetails gst_shagadelictv_details =
- GST_ELEMENT_DETAILS ("ShagadelicTV",
- "Filter/Effect/Video",
- "Oh behave, ShagedelicTV makes images shagadelic!",
- "Wim Taymans <wim.taymans@chello.be>");
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ gint area = filter->width * filter->height;
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+ g_free (filter->ripple);
+ g_free (filter->spiral);
- gst_element_class_set_details (element_class, &gst_shagadelictv_details);
+ filter->ripple = (gchar *) g_malloc (area * 4);
+ filter->spiral = (gchar *) g_malloc (area);
- for (i = 0; i < G_N_ELEMENTS (gst_shagadelictv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_shagadelictv_formats + i);
+ gst_shagadelic_initialize (filter);
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
-}
-
-static void
-gst_shagadelictv_class_init (gpointer g_class, gpointer class_data)
-{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
-
- gobject_class->set_property = gst_shagadelictv_set_property;
- gobject_class->get_property = gst_shagadelictv_get_property;
-
- videofilter_class->setup = gst_shagadelictv_setup;
-}
-
-static void
-gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
-{
- GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
-
- filter->ripple = NULL;
- filter->spiral = NULL;
+ return ret;
}
-static void
-gst_shagadelictv_setup (GstVideofilter * videofilter)
+static gboolean
+gst_shagadelictv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
GstShagadelicTV *filter;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
- int area;
-
- g_return_if_fail (GST_IS_SHAGADELICTV (videofilter));
- filter = GST_SHAGADELICTV (videofilter);
-
- filter->width = width;
- filter->height = height;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- area = filter->width * filter->height;
+ filter = GST_SHAGADELICTV (btrans);
- g_free (filter->ripple);
- g_free (filter->spiral);
+ structure = gst_caps_get_structure (caps, 0);
- filter->ripple = (gchar *) g_malloc (area * 4);
- filter->spiral = (gchar *) g_malloc (area);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- gst_shagadelic_initialize (filter);
+ return ret;
}
static unsigned int
@@ -261,8 +210,9 @@ gst_shagadelic_initialize (GstShagadelicTV * filter)
filter->phase = 0;
}
-static void
-gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_shagadelictv_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
{
GstShagadelicTV *filter;
guint32 *src, *dest;
@@ -270,11 +220,14 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
guint32 v;
guchar r, g, b;
gint width, height;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ filter = GST_SHAGADELICTV (trans);
- filter = GST_SHAGADELICTV (videofilter);
+ gst_buffer_stamp (out, in);
- src = (guint32 *) s;
- dest = (guint32 *) d;
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@@ -310,37 +263,72 @@ gst_shagadelictv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
filter->ry += filter->rvy;
filter->bx += filter->bvx;
filter->by += filter->bvy;
+
+ return ret;
}
static void
-gst_shagadelictv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
+gst_shagadelictv_base_init (gpointer g_class)
{
- GstShagadelicTV *filter;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- g_return_if_fail (GST_IS_SHAGADELICTV (object));
+ gst_element_class_set_details (element_class, &shagadelictv_details);
- filter = GST_SHAGADELICTV (object);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_shagadelictv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_shagadelictv_src_template));
+}
- switch (prop_id) {
- default:
- break;
- }
+static void
+gst_shagadelictv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_shagadelictv_set_caps);
+ trans_class->get_unit_size =
+ GST_DEBUG_FUNCPTR (gst_shagadelictv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_shagadelictv_transform);
}
static void
-gst_shagadelictv_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
+gst_shagadelictv_init (GTypeInstance * instance, gpointer g_class)
{
- GstShagadelicTV *filter;
+ GstShagadelicTV *filter = GST_SHAGADELICTV (instance);
- g_return_if_fail (GST_IS_SHAGADELICTV (object));
+ filter->ripple = NULL;
+ filter->spiral = NULL;
+}
- filter = GST_SHAGADELICTV (object);
+GType
+gst_shagadelictv_get_type (void)
+{
+ static GType shagadelictv_type = 0;
+
+ if (!shagadelictv_type) {
+ static const GTypeInfo shagadelictv_info = {
+ sizeof (GstShagadelicTVClass),
+ gst_shagadelictv_base_init,
+ NULL,
+ (GClassInitFunc) gst_shagadelictv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstShagadelicTV),
+ 0,
+ (GInstanceInitFunc) gst_shagadelictv_init,
+ };
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
+ shagadelictv_type =
+ g_type_register_static (GST_TYPE_VIDEOFILTER, "GstShagadelicTV",
+ &shagadelictv_info, 0);
}
+ return shagadelictv_type;
}
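
The elements converted so far all carry an identical get_unit_size implementation for packed 32-bit RGB. As a sketch only (the patch does not introduce such a helper, and the function name below is hypothetical), the shared logic amounts to:

  static gboolean
  effectv_get_packed32_unit_size (GstCaps * caps, guint * size)
  {
    GstStructure *structure = gst_caps_get_structure (caps, 0);
    gint width, height;

    if (!gst_structure_get_int (structure, "width", &width) ||
        !gst_structure_get_int (structure, "height", &height))
      return FALSE;

    /* 32 bits per pixel -> 4 bytes per pixel */
    *size = width * height * 4;
    return TRUE;
  }

For example, a 320x240 BGRx frame gives 320 * 240 * 4 = 307200 bytes, which is the size GstBaseTransform uses when allocating output buffers for these elements.
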
diff --git a/gst/effectv/gstvertigo.c b/gst/effectv/gstvertigo.c
index d44a5a0b..abe92a52 100644
--- a/gst/effectv/gstvertigo.c
+++ b/gst/effectv/gstvertigo.c
@@ -25,10 +25,13 @@
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
+
+#include <gstvideofilter.h>
+
#include <math.h>
#include <string.h>
-#include <gst/gst.h>
-#include <gstvideofilter.h>
+
+#include <gst/video/video.h>
#define GST_TYPE_VERTIGOTV \
(gst_vertigotv_get_type())
@@ -61,18 +64,11 @@ struct _GstVertigoTV
struct _GstVertigoTVClass
{
GstVideofilterClass parent_class;
-
- void (*reset) (GstElement * element);
};
-/* Filter signals and args */
-enum
-{
- /* FILL ME */
- RESET_SIGNAL,
- LAST_SIGNAL
-};
+GType gst_vertigotv_get_type (void);
+/* Filter signals and args */
enum
{
ARG_0,
@@ -80,152 +76,77 @@ enum
ARG_ZOOM_SPEED
};
-static void gst_vertigotv_base_init (gpointer g_class);
-static void gst_vertigotv_class_init (GstVertigoTVClass * klass,
- gpointer class_data);
-static void gst_vertigotv_init (GTypeInstance * instance, gpointer g_class);
-static void gst_vertigotv_setup (GstVideofilter * videofilter);
-
-static void gst_vertigotv_reset_handler (GstElement * element);
-
-static void gst_vertigotv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_vertigotv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-static void gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d,
- void *s);
-
-static guint gst_vertigotv_signals[LAST_SIGNAL] = { 0 };
-
-GType
-gst_vertigotv_get_type (void)
+static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
+ "Filter/Effect/Video",
+ "A loopback alpha blending effector with rotating and scaling",
+ "Wim Taymans <wim.taymans@chello.be>");
+
+static GstStaticPadTemplate gst_vertigotv_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
+
+static GstStaticPadTemplate gst_vertigotv_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_BGRx)
+ );
+
+static GstVideofilterClass *parent_class = NULL;
+
+static gboolean
+gst_vertigotv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType vertigotv_type = 0;
+ GstVertigoTV *filter = GST_VERTIGOTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- if (!vertigotv_type) {
- static const GTypeInfo vertigotv_info = {
- sizeof (GstVertigoTVClass),
- gst_vertigotv_base_init,
- NULL,
- (GClassInitFunc) gst_vertigotv_class_init,
- NULL,
- NULL,
- sizeof (GstVertigoTV),
- 0,
- (GInstanceInitFunc) gst_vertigotv_init,
- };
+ structure = gst_caps_get_structure (incaps, 0);
- vertigotv_type =
- g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV",
- &vertigotv_info, 0);
- }
- return vertigotv_type;
-}
-
-static GstVideofilterFormat gst_vertigotv_formats[] = {
- {"RGB ", 32, gst_vertigotv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000}
-};
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ gint area = filter->width * filter->height;
-static void
-gst_vertigotv_base_init (gpointer g_class)
-{
- /* elementfactory information */
- static GstElementDetails vertigotv_details = GST_ELEMENT_DETAILS ("VertigoTV",
- "Filter/Effect/Video",
- "A loopback alpha blending effector with rotating and scaling",
- "Wim Taymans <wim.taymans@chello.be>");
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+ g_free (filter->buffer);
+ filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
- gst_element_class_set_details (element_class, &vertigotv_details);
+ memset (filter->buffer, 0, area * 2 * sizeof (guint32));
+ filter->current_buffer = filter->buffer;
+ filter->alt_buffer = filter->buffer + area;
+ filter->phase = 0;
- for (i = 0; i < G_N_ELEMENTS (gst_vertigotv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_vertigotv_formats + i);
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+ return ret;
}
-static void
-gst_vertigotv_class_init (GstVertigoTVClass * klass, gpointer class_data)
-{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = (GObjectClass *) klass;
- gstelement_class = (GstElementClass *) klass;
- videofilter_class = GST_VIDEOFILTER_CLASS (klass);
-
- gst_vertigotv_signals[RESET_SIGNAL] =
- g_signal_new ("reset-parms",
- G_TYPE_FROM_CLASS (klass),
- G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
- G_STRUCT_OFFSET (GstVertigoTVClass, reset),
- NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
-
- klass->reset = gst_vertigotv_reset_handler;
-
- gobject_class->set_property = gst_vertigotv_set_property;
- gobject_class->get_property = gst_vertigotv_get_property;
-
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
- g_param_spec_float ("speed", "Speed", "Control the speed of movement",
- 0.01, 100.0, 0.02, G_PARAM_READWRITE));
- g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
- g_param_spec_float ("zoom_speed", "Zoom Speed",
- "Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
-
- videofilter_class->setup = gst_vertigotv_setup;
-}
-
-static void
-gst_vertigotv_reset_handler (GstElement * element)
-{
- GstVertigoTV *filter = GST_VERTIGOTV (element);
-
- filter->phase = 0.0;
- filter->phase_increment = 0.02;
- filter->zoomrate = 1.01;
-}
-
-static void
-gst_vertigotv_setup (GstVideofilter * videofilter)
+static gboolean
+gst_vertigotv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
GstVertigoTV *filter;
- gint area;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
-
- g_return_if_fail (GST_IS_VERTIGOTV (videofilter));
- filter = GST_VERTIGOTV (videofilter);
-
- filter->width = width;
- filter->height = height;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- area = width * height;
+ filter = GST_VERTIGOTV (btrans);
- g_free (filter->buffer);
- filter->buffer = (guint32 *) g_malloc (area * 2 * sizeof (guint32));
+ structure = gst_caps_get_structure (caps, 0);
- memset (filter->buffer, 0, area * 2 * sizeof (guint32));
- filter->current_buffer = filter->buffer;
- filter->alt_buffer = filter->buffer + area;
- filter->phase = 0;
-}
-
-static void
-gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
-{
- GstVertigoTV *filter = GST_VERTIGOTV (instance);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
+ }
- filter->buffer = NULL;
- filter->phase = 0.0;
- filter->phase_increment = 0.02;
- filter->zoomrate = 1.01;
+ return ret;
}
static void
@@ -276,22 +197,22 @@ gst_vertigotv_set_parms (GstVertigoTV * filter)
filter->phase = 0;
}
-static void
-gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_vertigotv_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
{
GstVertigoTV *filter;
- guint32 *src, *dest;
- guint32 *p;
+ guint32 *src, *dest, *p;
guint32 v;
- gint x, y;
- gint ox, oy;
- gint i;
- gint width, height, area;
+ gint x, y, ox, oy, i, width, height, area;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ filter = GST_VERTIGOTV (trans);
- filter = GST_VERTIGOTV (videofilter);
+ gst_buffer_stamp (out, in);
- src = (guint32 *) s;
- dest = (guint32 *) d;
+ src = (guint32 *) GST_BUFFER_DATA (in);
+ dest = (guint32 *) GST_BUFFER_DATA (out);
width = filter->width;
height = filter->height;
@@ -327,6 +248,8 @@ gst_vertigotv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
p = filter->current_buffer;
filter->current_buffer = filter->alt_buffer;
filter->alt_buffer = p;
+
+ return ret;
}
static void
@@ -373,3 +296,80 @@ gst_vertigotv_get_property (GObject * object, guint prop_id, GValue * value,
break;
}
}
+
+static void
+gst_vertigotv_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &vertigotv_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_vertigotv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_vertigotv_src_template));
+}
+
+static void
+gst_vertigotv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->set_property = gst_vertigotv_set_property;
+ gobject_class->get_property = gst_vertigotv_get_property;
+
+ g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SPEED,
+ g_param_spec_float ("speed", "Speed", "Control the speed of movement",
+ 0.01, 100.0, 0.02, G_PARAM_READWRITE));
+ g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ZOOM_SPEED,
+ g_param_spec_float ("zoom_speed", "Zoom Speed",
+ "Control the rate of zooming", 1.01, 1.1, 1.01, G_PARAM_READWRITE));
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_vertigotv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_vertigotv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_vertigotv_transform);
+}
+
+static void
+gst_vertigotv_init (GTypeInstance * instance, gpointer g_class)
+{
+ GstVertigoTV *filter = GST_VERTIGOTV (instance);
+
+ filter->buffer = NULL;
+ filter->phase = 0.0;
+ filter->phase_increment = 0.02;
+ filter->zoomrate = 1.01;
+}
+
+GType
+gst_vertigotv_get_type (void)
+{
+ static GType vertigotv_type = 0;
+
+ if (!vertigotv_type) {
+ static const GTypeInfo vertigotv_info = {
+ sizeof (GstVertigoTVClass),
+ gst_vertigotv_base_init,
+ NULL,
+ (GClassInitFunc) gst_vertigotv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstVertigoTV),
+ 0,
+ (GInstanceInitFunc) gst_vertigotv_init,
+ };
+
+ vertigotv_type =
+ g_type_register_static (GST_TYPE_VIDEOFILTER, "GstVertigoTV",
+ &vertigotv_info, 0);
+ }
+ return vertigotv_type;
+}
diff --git a/gst/effectv/gstwarp.c b/gst/effectv/gstwarp.c
index 4030613d..4cf4b548 100644
--- a/gst/effectv/gstwarp.c
+++ b/gst/effectv/gstwarp.c
@@ -37,17 +37,17 @@
#include "config.h"
#endif
-#include <gst/gst.h>
#include <gstvideofilter.h>
+
#include <string.h>
#include <math.h>
-#include "gsteffectv.h"
+
+#include <gst/video/video.h>
#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif
-
#define GST_TYPE_WARPTV \
(gst_warptv_get_type())
#define GST_WARPTV(obj) \
@@ -79,196 +79,85 @@ struct _GstWarpTVClass
GstVideofilterClass parent_class;
};
+GType gst_warptv_get_type (void);
-/* GstWarpTV signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0
- /* FILL ME */
-};
-
-static void gst_warptv_base_init (gpointer g_class);
-static void gst_warptv_class_init (gpointer g_class, gpointer class_data);
-static void gst_warptv_init (GTypeInstance * instance, gpointer g_class);
-
-static void gst_warptv_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_warptv_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static void gst_warptv_setup (GstVideofilter * videofilter);
static void initSinTable (GstWarpTV * filter);
static void initOffsTable (GstWarpTV * filter);
static void initDistTable (GstWarpTV * filter);
-static void gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s);
-GType
-gst_warptv_get_type (void)
+static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV",
+ "Filter/Effect/Video",
+ "WarpTV does realtime goo'ing of the video input",
+ "Sam Lantinga <slouken@devolution.com>");
+
+static GstStaticPadTemplate gst_warptv_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
+ GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ );
+
+static GstStaticPadTemplate gst_warptv_sink_template =
+ GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_RGBx ";" GST_VIDEO_CAPS_xRGB ";"
+ GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xBGR)
+ );
+
+static GstVideofilterClass *parent_class = NULL;
+
+static gboolean
+gst_warptv_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType warptv_type = 0;
+ GstWarpTV *filter = GST_WARPTV (btrans);
+ GstStructure *structure;
+ gboolean ret = FALSE;
- if (!warptv_type) {
- static const GTypeInfo warptv_info = {
- sizeof (GstWarpTVClass),
- gst_warptv_base_init,
- NULL,
- gst_warptv_class_init,
- NULL,
- NULL,
- sizeof (GstWarpTV),
- 0,
- gst_warptv_init,
- };
+ structure = gst_caps_get_structure (incaps, 0);
- warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
- "GstWarpTV", &warptv_info, 0);
- }
- return warptv_type;
-}
+ if (gst_structure_get_int (structure, "width", &filter->width) &&
+ gst_structure_get_int (structure, "height", &filter->height)) {
+ g_free (filter->disttable);
+ g_free (filter->offstable);
-static GstVideofilterFormat gst_warptv_formats[] = {
- {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x00ff0000, 0x0000ff00,
- 0x000000ff},
- {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0xff000000, 0x00ff0000,
- 0x0000ff00},
- {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x000000ff, 0x0000ff00,
- 0x00ff0000},
- {"RGB ", 32, gst_warptv_rgb32, 24, G_BIG_ENDIAN, 0x0000ff00, 0x00ff0000,
- 0xff000000},
-};
+ filter->offstable = g_malloc (filter->height * sizeof (guint32));
+ filter->disttable =
+ g_malloc (filter->width * filter->height * sizeof (guint32));
-static void
-gst_warptv_base_init (gpointer g_class)
-{
- static GstElementDetails warptv_details = GST_ELEMENT_DETAILS ("WarpTV",
- "Filter/Effect/Video",
- "WarpTV does realtime goo'ing of the video input",
- "Sam Lantinga <slouken@devolution.com>");
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
-
- gst_element_class_set_details (element_class, &warptv_details);
-
- for (i = 0; i < G_N_ELEMENTS (gst_warptv_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_warptv_formats + i);
+ initSinTable (filter);
+ initOffsTable (filter);
+ initDistTable (filter);
+ ret = TRUE;
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+ return ret;
}
-static void
-gst_warptv_class_init (gpointer g_class, gpointer class_data)
+static gboolean
+gst_warptv_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
-
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
-
- gobject_class->set_property = gst_warptv_set_property;
- gobject_class->get_property = gst_warptv_get_property;
-
-#if 0
- g_object_class_install_property (gobject_class, ARG_METHOD,
- g_param_spec_enum ("method", "method", "method",
- GST_TYPE_WARPTV_METHOD, GST_WARPTV_METHOD_1, G_PARAM_READWRITE));
-#endif
-
- videofilter_class->setup = gst_warptv_setup;
-}
-
-static void
-gst_warptv_init (GTypeInstance * instance, gpointer g_class)
-{
- GstWarpTV *warptv = GST_WARPTV (instance);
- GstVideofilter *videofilter;
-
- GST_DEBUG ("gst_warptv_init");
-
- videofilter = GST_VIDEOFILTER (warptv);
-
- /* do stuff */
-}
-
-static void
-gst_warptv_set_property (GObject * object, guint prop_id, const GValue * value,
- GParamSpec * pspec)
-{
- GstWarpTV *src;
-
- g_return_if_fail (GST_IS_WARPTV (object));
- src = GST_WARPTV (object);
-
- GST_DEBUG ("gst_warptv_set_property");
- switch (prop_id) {
-#if 0
- case ARG_METHOD:
- src->method = g_value_get_enum (value);
- break;
-#endif
- default:
- break;
- }
-}
+ GstWarpTV *filter;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
-static void
-gst_warptv_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstWarpTV *src;
+ filter = GST_WARPTV (btrans);
- g_return_if_fail (GST_IS_WARPTV (object));
- src = GST_WARPTV (object);
+ structure = gst_caps_get_structure (caps, 0);
- switch (prop_id) {
-#if 0
- case ARG_METHOD:
- g_value_set_enum (value, src->method);
- break;
-#endif
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = width * height * 32 / 8;
+ ret = TRUE;
+ GST_DEBUG_OBJECT (filter, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
}
-}
-
-
-static void
-gst_warptv_setup (GstVideofilter * videofilter)
-{
- GstWarpTV *warptv;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
-
- g_return_if_fail (GST_IS_WARPTV (videofilter));
- warptv = GST_WARPTV (videofilter);
-
- /* if any setup needs to be done, do it here */
-
- warptv->width = width;
- warptv->height = height;
-#if 0
- /* FIXME this should be reset in PAUSE->READY, not here */
- warptv->tval = 0;
-#endif
-
- g_free (warptv->disttable);
- g_free (warptv->offstable);
- warptv->offstable = g_malloc (height * sizeof (guint32));
- warptv->disttable = g_malloc (width * height * sizeof (guint32));
-
- initSinTable (warptv);
- initOffsTable (warptv);
- initDistTable (warptv);
+ return ret;
}
static void
@@ -323,21 +212,21 @@ initDistTable (GstWarpTV * filter)
#endif
}
-static void
-gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
+static GstFlowReturn
+gst_warptv_transform (GstBaseTransform * trans, GstBuffer * in, GstBuffer * out)
{
- GstWarpTV *warptv;
- int width = gst_videofilter_get_input_width (videofilter);
- int height = gst_videofilter_get_input_height (videofilter);
- guint32 *src = s;
- guint32 *dest = d;
+ GstWarpTV *warptv = GST_WARPTV (trans);
+ int width = warptv->width;
+ int height = warptv->height;
+ guint32 *src = (guint32 *) GST_BUFFER_DATA (in);
+ guint32 *dest = (guint32 *) GST_BUFFER_DATA (out);
gint xw, yw, cw;
gint32 c, i, x, y, dx, dy, maxx, maxy;
gint32 skip, *ctptr, *distptr;
gint32 *sintable, *ctable;
+ GstFlowReturn ret = GST_FLOW_OK;
- g_return_if_fail (GST_IS_WARPTV (videofilter));
- warptv = GST_WARPTV (videofilter);
+ gst_buffer_stamp (out, in);
xw = (gint) (sin ((warptv->tval + 100) * M_PI / 128) * 30);
yw = (gint) (sin ((warptv->tval) * M_PI / 256) * -35);
@@ -383,4 +272,66 @@ gst_warptv_rgb32 (GstVideofilter * videofilter, void *d, void *s)
}
warptv->tval = (warptv->tval + 1) & 511;
+
+ return ret;
+}
+
+static void
+gst_warptv_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &warptv_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_warptv_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_warptv_src_template));
+}
+
+static void
+gst_warptv_class_init (gpointer klass, gpointer class_data)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *element_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_warptv_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_warptv_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_warptv_transform);
+}
+
+static void
+gst_warptv_init (GTypeInstance * instance, gpointer g_class)
+{
+}
+
+GType
+gst_warptv_get_type (void)
+{
+ static GType warptv_type = 0;
+
+ if (!warptv_type) {
+ static const GTypeInfo warptv_info = {
+ sizeof (GstWarpTVClass),
+ gst_warptv_base_init,
+ NULL,
+ gst_warptv_class_init,
+ NULL,
+ NULL,
+ sizeof (GstWarpTV),
+ 0,
+ gst_warptv_init,
+ };
+
+ warptv_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+ "GstWarpTV", &warptv_info, 0);
+ }
+ return warptv_type;
}
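
With warptv, the last of the effectv elements touched by this patch is ported to GstBaseTransform. A minimal 0.10-style application exercising one of them from C, assuming the element names registered by gsteffectv.c (e.g. "warptv") and the usual ffmpegcolorspace/autovideosink elements from gst-plugins-base and gst-plugins-good (sketch, not part of the patch):

  #include <gst/gst.h>

  int
  main (int argc, char *argv[])
  {
    GstElement *pipeline;
    GMainLoop *loop;
    GError *error = NULL;

    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* colorspace converters on both sides, since warptv only accepts
     * packed 32-bit RGB variants */
    pipeline = gst_parse_launch ("videotestsrc ! ffmpegcolorspace ! "
        "warptv ! ffmpegcolorspace ! autovideosink", &error);
    if (pipeline == NULL) {
      g_printerr ("parse error: %s\n", error ? error->message : "unknown");
      return 1;
    }

    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    g_main_loop_run (loop);

    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    g_main_loop_unref (loop);
    return 0;
  }
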
diff --git a/gst/videofilter/Makefile.am b/gst/videofilter/Makefile.am
index 2cc94db8..17b3c3fa 100644
--- a/gst/videofilter/Makefile.am
+++ b/gst/videofilter/Makefile.am
@@ -1,5 +1,5 @@
lib_LTLIBRARIES = libgstvideofilter-@GST_MAJORMINOR@.la
-noinst_LTLIBRARIES = libgstvideoexample.la
+# noinst_LTLIBRARIES = libgstvideoexample.la
plugin_LTLIBRARIES = libgstvideoflip.la
noinst_HEADERS = gstvideofilter.h gstvideoflip.h
@@ -11,14 +11,16 @@ libgstvideofilter_@GST_MAJORMINOR@_la_SOURCES = gstvideofilter.c gstvideofilter.
libgstvideofilter_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS)
libgstvideofilter_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIBS)
-libgstvideoexample_la_SOURCES = gstvideoexample.c
-libgstvideoexample_la_CFLAGS = $(GST_CFLAGS)
-libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
-libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+# libgstvideoexample_la_SOURCES = gstvideoexample.c
+# libgstvideoexample_la_CFLAGS = $(GST_CFLAGS)
+# libgstvideoexample_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
+# libgstvideoexample_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideoflip_la_SOURCES = gstvideoflip.c
-libgstvideoflip_la_CFLAGS = $(GST_CFLAGS)
-libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS)
+libgstvideoflip_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS)
+libgstvideoflip_la_LIBADD = libgstvideofilter-@GST_MAJORMINOR@.la $(GST_LIBS) \
+ $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS)
libgstvideoflip_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# libgstvideobalance_la_SOURCES = gstvideobalance.c
diff --git a/gst/videofilter/gstvideobalance.c b/gst/videofilter/gstvideobalance.c
index 0825aa46..919f13ff 100644
--- a/gst/videofilter/gstvideobalance.c
+++ b/gst/videofilter/gstvideobalance.c
@@ -27,7 +27,6 @@
#include "config.h"
#endif
-/*#define DEBUG_ENABLED */
#include "gstvideobalance.h"
#ifdef HAVE_LIBOIL
#include <liboil/liboil.h>
@@ -40,12 +39,6 @@
/* GstVideobalance signals and args */
enum
{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
ARG_0,
ARG_CONTRAST,
ARG_BRIGHTNESS,
@@ -117,11 +110,6 @@ gst_videobalance_get_type (void)
return videobalance_type;
}
-static GstVideofilterFormat gst_videobalance_formats[] = {
- {"I420", 12, gst_videobalance_planar411,},
-};
-
-
static void
gst_videobalance_base_init (gpointer g_class)
{
diff --git a/gst/videofilter/gstvideobalance.h b/gst/videofilter/gstvideobalance.h
index 45aab910..ac709f88 100644
--- a/gst/videofilter/gstvideobalance.h
+++ b/gst/videofilter/gstvideobalance.h
@@ -21,12 +21,8 @@
#ifndef __GST_VIDEOBALANCE_H__
#define __GST_VIDEOBALANCE_H__
-
-#include <gst/gst.h>
-
#include "gstvideofilter.h"
-
G_BEGIN_DECLS
#define GST_TYPE_VIDEOBALANCE \
diff --git a/gst/videofilter/gstvideofilter.c b/gst/videofilter/gstvideofilter.c
index 89951bb9..a7d9db0e 100644
--- a/gst/videofilter/gstvideofilter.c
+++ b/gst/videofilter/gstvideofilter.c
@@ -22,40 +22,15 @@
#include "config.h"
#endif
-#include <string.h>
-/*#define DEBUG_ENABLED */
#include "gstvideofilter.h"
GST_DEBUG_CATEGORY_STATIC (gst_videofilter_debug);
#define GST_CAT_DEFAULT gst_videofilter_debug
-/* GstVideofilter signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- ARG_0,
- ARG_METHOD
- /* FILL ME */
-};
-
-static void gst_videofilter_base_init (gpointer g_class);
static void gst_videofilter_class_init (gpointer g_class, gpointer class_data);
static void gst_videofilter_init (GTypeInstance * instance, gpointer g_class);
-static void gst_videofilter_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_videofilter_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-static GstFlowReturn gst_videofilter_chain (GstPad * pad, GstBuffer * buffer);
-GstCaps *gst_videofilter_class_get_capslist (GstVideofilterClass * klass);
-
-static GstElementClass *parent_class = NULL;
+static GstBaseTransformClass *parent_class = NULL;
GType
gst_videofilter_get_type (void)
@@ -65,7 +40,7 @@ gst_videofilter_get_type (void)
if (!videofilter_type) {
static const GTypeInfo videofilter_info = {
sizeof (GstVideofilterClass),
- gst_videofilter_base_init,
+ NULL,
NULL,
gst_videofilter_class_init,
NULL,
@@ -75,484 +50,37 @@ gst_videofilter_get_type (void)
gst_videofilter_init,
};
- videofilter_type = g_type_register_static (GST_TYPE_ELEMENT,
+ videofilter_type = g_type_register_static (GST_TYPE_BASE_TRANSFORM,
"GstVideofilter", &videofilter_info, G_TYPE_FLAG_ABSTRACT);
}
return videofilter_type;
}
static void
-gst_videofilter_base_init (gpointer g_class)
-{
- static GstElementDetails videofilter_details = {
- "Video scaler",
- "Filter/Effect/Video",
- "Resizes video",
- "David Schleef <ds@schleef.org>"
- };
- GstVideofilterClass *klass = (GstVideofilterClass *) g_class;
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- klass->formats = g_ptr_array_new ();
-
- gst_element_class_set_details (element_class, &videofilter_details);
-}
-
-static void
gst_videofilter_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
+ GstBaseTransformClass *trans_class;
GstVideofilterClass *klass;
klass = (GstVideofilterClass *) g_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
- parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
-
- gobject_class->set_property = gst_videofilter_set_property;
- gobject_class->get_property = gst_videofilter_get_property;
+ parent_class = g_type_class_peek_parent (klass);
GST_DEBUG_CATEGORY_INIT (gst_videofilter_debug, "videofilter", 0,
"videofilter");
}
-static GstStructure *
-gst_videofilter_format_get_structure (GstVideofilterFormat * format)
-{
- unsigned int fourcc;
- GstStructure *structure;
-
- if (format->filter_func == NULL)
- return NULL;
-
- fourcc =
- GST_MAKE_FOURCC (format->fourcc[0], format->fourcc[1], format->fourcc[2],
- format->fourcc[3]);
-
- if (format->depth) {
- structure = gst_structure_new ("video/x-raw-rgb",
- "depth", G_TYPE_INT, format->depth,
- "bpp", G_TYPE_INT, format->bpp,
- "endianness", G_TYPE_INT, format->endianness,
- "red_mask", G_TYPE_INT, format->red_mask,
- "green_mask", G_TYPE_INT, format->green_mask,
- "blue_mask", G_TYPE_INT, format->blue_mask, NULL);
- } else {
- structure = gst_structure_new ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fourcc, NULL);
- }
-
- gst_structure_set (structure,
- "height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
- "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
-
- return structure;
-}
-
-GstCaps *
-gst_videofilter_class_get_capslist (GstVideofilterClass * klass)
-{
- GstCaps *caps;
- GstStructure *structure;
- int i;
-
- caps = gst_caps_new_empty ();
- for (i = 0; i < klass->formats->len; i++) {
- structure =
- gst_videofilter_format_get_structure (g_ptr_array_index (klass->formats,
- i));
- gst_caps_append_structure (caps, structure);
- }
-
- return caps;
-}
-
-static GstCaps *
-gst_videofilter_getcaps (GstPad * pad)
-{
- GstVideofilter *videofilter;
- GstVideofilterClass *klass;
- GstCaps *caps;
- GstPad *peer;
- int i;
-
- videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
- GST_DEBUG_OBJECT (videofilter, "gst_videofilter_getcaps");
-
- klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
-
- /* we can handle anything that was registered */
- caps = gst_caps_new_empty ();
- for (i = 0; i < klass->formats->len; i++) {
- GstCaps *fromcaps;
-
- fromcaps =
- gst_caps_new_full (gst_videofilter_format_get_structure
- (g_ptr_array_index (klass->formats, i)), NULL);
-
- gst_caps_append (caps, fromcaps);
- }
-
- peer = gst_pad_get_peer (pad);
- if (peer) {
- GstCaps *peercaps;
-
- peercaps = gst_pad_get_caps (peer);
- if (peercaps) {
- GstCaps *icaps;
-
- icaps = gst_caps_intersect (peercaps, caps);
- gst_caps_unref (peercaps);
- gst_caps_unref (caps);
- caps = icaps;
- }
- //gst_object_unref (peer);
- }
-
- return caps;
-}
-
-static gboolean
-gst_videofilter_setcaps (GstPad * pad, GstCaps * caps)
-{
- GstVideofilter *videofilter;
- GstStructure *structure;
- int width, height;
- const GValue *framerate;
- int ret;
-
- videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
-
- structure = gst_caps_get_structure (caps, 0);
-
- videofilter->format =
- gst_videofilter_find_format_by_structure (videofilter, structure);
- g_return_val_if_fail (videofilter->format, GST_PAD_LINK_REFUSED);
-
- ret = gst_structure_get_int (structure, "width", &width);
- ret &= gst_structure_get_int (structure, "height", &height);
-
- framerate = gst_structure_get_value (structure, "framerate");
- ret &= (framerate != NULL && GST_VALUE_HOLDS_FRACTION (framerate));
-
- if (!ret)
- return FALSE;
-
- gst_pad_set_caps (videofilter->srcpad, caps);
-
- GST_DEBUG_OBJECT (videofilter, "width %d height %d", width, height);
-
-#if 0
- if (pad == videofilter->srcpad) {
- videofilter->to_width = width;
- videofilter->to_height = height;
- } else {
- videofilter->from_width = width;
- videofilter->from_height = height;
- }
-#endif
- videofilter->to_width = width;
- videofilter->to_height = height;
- videofilter->from_width = width;
- videofilter->from_height = height;
- g_value_copy (framerate, &videofilter->framerate);
-
- gst_videofilter_setup (videofilter);
-
- return TRUE;
-}
-
static void
gst_videofilter_init (GTypeInstance * instance, gpointer g_class)
{
GstVideofilter *videofilter = GST_VIDEOFILTER (instance);
- GstPadTemplate *pad_template;
GST_DEBUG_OBJECT (videofilter, "gst_videofilter_init");
- pad_template =
- gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "sink");
- g_return_if_fail (pad_template != NULL);
- videofilter->sinkpad = gst_pad_new_from_template (pad_template, "sink");
- gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->sinkpad);
- gst_pad_set_chain_function (videofilter->sinkpad, gst_videofilter_chain);
- gst_pad_set_setcaps_function (videofilter->sinkpad, gst_videofilter_setcaps);
- gst_pad_set_getcaps_function (videofilter->sinkpad, gst_videofilter_getcaps);
-
- pad_template =
- gst_element_class_get_pad_template (GST_ELEMENT_CLASS (g_class), "src");
- g_return_if_fail (pad_template != NULL);
- videofilter->srcpad = gst_pad_new_from_template (pad_template, "src");
- gst_element_add_pad (GST_ELEMENT (videofilter), videofilter->srcpad);
- gst_pad_set_getcaps_function (videofilter->srcpad, gst_videofilter_getcaps);
-
videofilter->inited = FALSE;
- g_value_init (&videofilter->framerate, GST_TYPE_FRACTION);
-}
-
-static GstFlowReturn
-gst_videofilter_chain (GstPad * pad, GstBuffer * buf)
-{
- GstVideofilter *videofilter;
- guchar *data;
- gulong size;
- GstBuffer *outbuf;
- GstFlowReturn ret;
-
- videofilter = GST_VIDEOFILTER (GST_PAD_PARENT (pad));
- GST_DEBUG_OBJECT (videofilter, "gst_videofilter_chain");
-
- if (videofilter->passthru) {
- return gst_pad_push (videofilter->srcpad, buf);
- }
-
- if (GST_PAD_CAPS (pad) == NULL) {
- return GST_FLOW_NOT_NEGOTIATED;
- }
-
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
-
- GST_LOG_OBJECT (videofilter, "got buffer of %ld bytes in '%s'", size,
- GST_OBJECT_NAME (videofilter));
-
- GST_LOG_OBJECT (videofilter,
- "size=%ld from=%dx%d to=%dx%d fromsize=%ld (should be %d) tosize=%d",
- size, videofilter->from_width, videofilter->from_height,
- videofilter->to_width, videofilter->to_height, size,
- videofilter->from_buf_size, videofilter->to_buf_size);
-
-
- if (size > videofilter->from_buf_size) {
- GST_INFO_OBJECT (videofilter, "buffer size %ld larger than expected (%d)",
- size, videofilter->from_buf_size);
- return GST_FLOW_ERROR;
- }
-
- ret = gst_pad_alloc_buffer (videofilter->srcpad, GST_BUFFER_OFFSET_NONE,
- videofilter->to_buf_size, GST_PAD_CAPS (videofilter->srcpad), &outbuf);
- if (ret != GST_FLOW_OK)
- goto no_buffer;
-
- g_return_val_if_fail (GST_BUFFER_DATA (outbuf), GST_FLOW_ERROR);
-
- GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf);
- GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf);
-
- g_return_val_if_fail (videofilter->format, GST_FLOW_ERROR);
- GST_DEBUG_OBJECT (videofilter, "format %s", videofilter->format->fourcc);
-
- videofilter->in_buf = buf;
- videofilter->out_buf = outbuf;
-
- videofilter->format->filter_func (videofilter, GST_BUFFER_DATA (outbuf),
- data);
- gst_buffer_unref (buf);
-
- GST_LOG_OBJECT (videofilter, "pushing buffer of %d bytes in '%s'",
- GST_BUFFER_SIZE (outbuf), GST_OBJECT_NAME (videofilter));
-
- ret = gst_pad_push (videofilter->srcpad, outbuf);
-
- return ret;
-
-no_buffer:
- {
- return ret;
- }
-}
-
-static void
-gst_videofilter_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstVideofilter *videofilter;
-
- g_return_if_fail (GST_IS_VIDEOFILTER (object));
- videofilter = GST_VIDEOFILTER (object);
-
- GST_DEBUG_OBJECT (videofilter, "gst_videofilter_set_property");
- switch (prop_id) {
- default:
- break;
- }
-}
-
-static void
-gst_videofilter_get_property (GObject * object, guint prop_id, GValue * value,
- GParamSpec * pspec)
-{
- GstVideofilter *videofilter;
-
- g_return_if_fail (GST_IS_VIDEOFILTER (object));
- videofilter = GST_VIDEOFILTER (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-int
-gst_videofilter_get_input_width (GstVideofilter * videofilter)
-{
- g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);
-
- return videofilter->from_width;
-}
-
-int
-gst_videofilter_get_input_height (GstVideofilter * videofilter)
-{
- g_return_val_if_fail (GST_IS_VIDEOFILTER (videofilter), 0);
-
- return videofilter->from_height;
-}
-
-void
-gst_videofilter_set_output_size (GstVideofilter * videofilter,
- int width, int height)
-{
- GstCaps *srccaps;
- GstStructure *structure;
-
- g_return_if_fail (GST_IS_VIDEOFILTER (videofilter));
-
- videofilter->to_width = width;
- videofilter->to_height = height;
-
- videofilter->to_buf_size = (videofilter->to_width * videofilter->to_height
- * videofilter->format->bpp) / 8;
-
- //srccaps = gst_caps_copy (gst_pad_get_negotiated_caps (videofilter->srcpad));
- srccaps = gst_caps_copy (GST_PAD_CAPS (videofilter->srcpad));
- structure = gst_caps_get_structure (srccaps, 0);
-
- gst_structure_set (structure, "width", G_TYPE_INT, width,
- "height", G_TYPE_INT, height, NULL);
-
- gst_pad_set_caps (videofilter->srcpad, srccaps);
-}
-
-void
-gst_videofilter_setup (GstVideofilter * videofilter)
-{
- GstVideofilterClass *klass;
-
- GST_DEBUG_OBJECT (videofilter, "setup");
-
- klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
-
- if (klass->setup) {
- GST_DEBUG_OBJECT (videofilter, "calling class setup method");
- klass->setup (videofilter);
- }
-
- if (videofilter->to_width == 0) {
- videofilter->to_width = videofilter->from_width;
- }
- if (videofilter->to_height == 0) {
- videofilter->to_height = videofilter->from_height;
- }
-
- g_return_if_fail (videofilter->format != NULL);
- g_return_if_fail (videofilter->from_width > 0);
- g_return_if_fail (videofilter->from_height > 0);
- g_return_if_fail (videofilter->to_width > 0);
- g_return_if_fail (videofilter->to_height > 0);
-
- videofilter->from_buf_size =
- (videofilter->from_width * videofilter->from_height *
- videofilter->format->bpp) / 8;
- videofilter->to_buf_size =
- (videofilter->to_width * videofilter->to_height *
- videofilter->format->bpp) / 8;
-
- GST_DEBUG_OBJECT (videofilter, "from_buf_size %d to_buf_size %d",
- videofilter->from_buf_size, videofilter->to_buf_size);
- videofilter->inited = TRUE;
-}
-
-GstVideofilterFormat *
-gst_videofilter_find_format_by_structure (GstVideofilter * videofilter,
- const GstStructure * structure)
-{
- int i;
- GstVideofilterClass *klass;
- GstVideofilterFormat *format;
- gboolean ret;
-
- klass = GST_VIDEOFILTER_CLASS (G_OBJECT_GET_CLASS (videofilter));
-
- g_return_val_if_fail (structure != NULL, NULL);
-
- if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0) {
- guint32 fourcc;
-
- ret = gst_structure_get_fourcc (structure, "format", &fourcc);
- if (!ret)
- return NULL;
- for (i = 0; i < klass->formats->len; i++) {
- guint32 format_fourcc;
-
- format = g_ptr_array_index (klass->formats, i);
- format_fourcc = GST_STR_FOURCC (format->fourcc);
- if (format->depth == 0 && format_fourcc == fourcc) {
- return format;
- }
- }
- } else if (strcmp (gst_structure_get_name (structure), "video/x-raw-rgb")
- == 0) {
- int bpp;
- int depth;
- int endianness;
- int red_mask;
- int green_mask;
- int blue_mask;
-
- ret = gst_structure_get_int (structure, "bpp", &bpp);
- ret &= gst_structure_get_int (structure, "depth", &depth);
- ret &= gst_structure_get_int (structure, "endianness", &endianness);
- ret &= gst_structure_get_int (structure, "red_mask", &red_mask);
- ret &= gst_structure_get_int (structure, "green_mask", &green_mask);
- ret &= gst_structure_get_int (structure, "blue_mask", &blue_mask);
- if (!ret)
- return NULL;
- for (i = 0; i < klass->formats->len; i++) {
- format = g_ptr_array_index (klass->formats, i);
- if (format->bpp == bpp && format->depth == depth &&
- format->endianness == endianness && format->red_mask == red_mask &&
- format->green_mask == green_mask && format->blue_mask == blue_mask) {
- return format;
- }
- }
- }
-
- return NULL;
-}
-
-void
-gst_videofilter_class_add_format (GstVideofilterClass * videofilterclass,
- GstVideofilterFormat * format)
-{
- g_ptr_array_add (videofilterclass->formats, format);
-}
-
-void
-gst_videofilter_class_add_pad_templates (GstVideofilterClass *
- videofilter_class)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (videofilter_class);
-
- gst_element_class_add_pad_template (element_class,
- gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- gst_videofilter_class_get_capslist (videofilter_class)));
-
- gst_element_class_add_pad_template (element_class,
- gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
- gst_videofilter_class_get_capslist (videofilter_class)));
}
diff --git a/gst/videofilter/gstvideofilter.h b/gst/videofilter/gstvideofilter.h
index 9923a2a7..a2f9ef04 100644
--- a/gst/videofilter/gstvideofilter.h
+++ b/gst/videofilter/gstvideofilter.h
@@ -21,32 +21,13 @@
#ifndef __GST_VIDEOFILTER_H__
#define __GST_VIDEOFILTER_H__
-
-#include <gst/gst.h>
-
+#include <gst/base/gstbasetransform.h>
G_BEGIN_DECLS
typedef struct _GstVideofilter GstVideofilter;
typedef struct _GstVideofilterClass GstVideofilterClass;
-typedef void (*GstVideofilterFilterFunc)(GstVideofilter *filter,
- void *out_data, void *in_data);
-
-typedef void (*GstVideofilterSetupFunc)(GstVideofilter *filter);
-
-typedef struct _GstVideofilterFormat GstVideofilterFormat;
-struct _GstVideofilterFormat {
- char *fourcc;
- int bpp;
- GstVideofilterFilterFunc filter_func;
- int depth;
- unsigned int endianness;
- unsigned int red_mask;
- unsigned int green_mask;
- unsigned int blue_mask;
-};
-
#define GST_TYPE_VIDEOFILTER \
(gst_videofilter_get_type())
#define GST_VIDEOFILTER(obj) \
@@ -59,51 +40,17 @@ struct _GstVideofilterFormat {
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEOFILTER))
struct _GstVideofilter {
- GstElement element;
-
- GstPad *sinkpad,*srcpad;
+ GstBaseTransform element;
- /* video state */
gboolean inited;
- GstVideofilterFormat *format;
- gint to_width;
- gint to_height;
- gint from_width;
- gint from_height;
- gboolean passthru;
-
- /* private */
- gint from_buf_size;
- gint to_buf_size;
- GValue framerate;
-
- GstBuffer *in_buf;
- GstBuffer *out_buf;
};
struct _GstVideofilterClass {
- GstElementClass parent_class;
-
- GPtrArray *formats;
- GstVideofilterSetupFunc setup;
+ GstBaseTransformClass parent_class;
};
GType gst_videofilter_get_type(void);
-int gst_videofilter_get_input_width(GstVideofilter *videofilter);
-int gst_videofilter_get_input_height(GstVideofilter *videofilter);
-void gst_videofilter_set_output_size(GstVideofilter *videofilter,
- int width, int height);
-GstVideofilterFormat *gst_videofilter_find_format_by_structure (GstVideofilter *filter,
- const GstStructure *structure);
-GstCaps *gst_videofilter_class_get_capslist(GstVideofilterClass *videofilterclass);
-void gst_videofilter_setup (GstVideofilter * videofilter);
-
-void gst_videofilter_class_add_format(GstVideofilterClass *videofilterclass,
- GstVideofilterFormat *format);
-void gst_videofilter_class_add_pad_templates (GstVideofilterClass *videofilterclass);
-
G_END_DECLS
#endif /* __GST_VIDEOFILTER_H__ */
-
diff --git a/gst/videofilter/gstvideoflip.c b/gst/videofilter/gstvideoflip.c
index da9a76b9..d0f3263e 100644
--- a/gst/videofilter/gstvideoflip.c
+++ b/gst/videofilter/gstvideoflip.c
@@ -27,17 +27,11 @@
#include "config.h"
#endif
-/*#define DEBUG_ENABLED */
#include "gstvideoflip.h"
-#include <string.h>
-/* GstVideoflip signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
+#include <gst/video/video.h>
+/* GstVideoflip signals and args */
enum
{
ARG_0,
@@ -45,21 +39,30 @@ enum
/* FILL ME */
};
-GST_DEBUG_CATEGORY_STATIC (gst_videoflip_debug);
-#define GST_CAT_DEFAULT gst_videoflip_debug
+GST_DEBUG_CATEGORY (videoflip_debug);
+#define GST_CAT_DEFAULT videoflip_debug
-static void gst_videoflip_base_init (gpointer g_class);
-static void gst_videoflip_class_init (gpointer g_class, gpointer class_data);
-static void gst_videoflip_init (GTypeInstance * instance, gpointer g_class);
+static GstElementDetails videoflip_details =
+GST_ELEMENT_DETAILS ("Video Flipper",
+ "Filter/Effect/Video",
+ "Flips and rotates video",
+ "David Schleef <ds@schleef.org>");
+
+static GstStaticPadTemplate gst_videoflip_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
+ );
-static void gst_videoflip_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_videoflip_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
+static GstStaticPadTemplate gst_videoflip_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ IYUV, I420, YV12 }"))
+ );
-static void gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest,
- void *src);
-static void gst_videoflip_setup (GstVideofilter * videofilter);
+static GstVideofilterClass *parent_class = NULL;
#define GST_TYPE_VIDEOFLIP_METHOD (gst_videoflip_method_get_type())
@@ -72,7 +75,7 @@ gst_videoflip_method_get_type (void)
{GST_VIDEOFLIP_METHOD_90R, "Rotate clockwise 90 degrees", "clockwise"},
{GST_VIDEOFLIP_METHOD_180, "Rotate 180 degrees", "rotate-180"},
{GST_VIDEOFLIP_METHOD_90L, "Rotate counter-clockwise 90 degrees",
- "counterclockwise"},
+ "counterclockwise"},
{GST_VIDEOFLIP_METHOD_HORIZ, "Flip horizontally", "horizontal-flip"},
{GST_VIDEOFLIP_METHOD_VERT, "Flip vertically", "vertical-flip"},
{GST_VIDEOFLIP_METHOD_TRANS,
@@ -89,90 +92,318 @@ gst_videoflip_method_get_type (void)
return videoflip_method_type;
}
-GType
-gst_videoflip_get_type (void)
+static gboolean
+gst_videoflip_set_caps (GstBaseTransform * btrans, GstCaps * incaps,
+ GstCaps * outcaps)
{
- static GType videoflip_type = 0;
+ GstVideoflip *vf;
+ GstStructure *in_s, *out_s;
+ gboolean ret = FALSE;
+
+ vf = GST_VIDEOFLIP (btrans);
+
+ in_s = gst_caps_get_structure (incaps, 0);
+ out_s = gst_caps_get_structure (outcaps, 0);
+
+ if (gst_structure_get_int (in_s, "width", &vf->from_width) &&
+ gst_structure_get_int (in_s, "height", &vf->from_height) &&
+ gst_structure_get_int (out_s, "width", &vf->to_width) &&
+ gst_structure_get_int (out_s, "height", &vf->to_height)) {
+ /* Check that they are correct */
+ switch (vf->method) {
+ case GST_VIDEOFLIP_METHOD_90R:
+ case GST_VIDEOFLIP_METHOD_90L:
+ case GST_VIDEOFLIP_METHOD_TRANS:
+ case GST_VIDEOFLIP_METHOD_OTHER:
+ if ((vf->from_width != vf->to_height) ||
+ (vf->from_height != vf->to_width)) {
+          GST_DEBUG_OBJECT (vf, "we are swapping width and height but the "
+              "caps do not match: %dx%d to %dx%d", vf->from_width,
+              vf->from_height, vf->to_width, vf->to_height);
+ goto beach;
+ }
+ break;
+      case GST_VIDEOFLIP_METHOD_IDENTITY:
+        break;
+ case GST_VIDEOFLIP_METHOD_180:
+ case GST_VIDEOFLIP_METHOD_HORIZ:
+ case GST_VIDEOFLIP_METHOD_VERT:
+ if ((vf->from_width != vf->to_width) ||
+ (vf->from_height != vf->to_height)) {
+          GST_DEBUG_OBJECT (vf, "we are keeping width and height but the "
+              "caps do not match: %dx%d to %dx%d", vf->from_width,
+              vf->from_height, vf->to_width, vf->to_height);
+ goto beach;
+ }
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
- if (!videoflip_type) {
- static const GTypeInfo videoflip_info = {
- sizeof (GstVideoflipClass),
- gst_videoflip_base_init,
- NULL,
- gst_videoflip_class_init,
- NULL,
- NULL,
- sizeof (GstVideoflip),
- 0,
- gst_videoflip_init,
- };
+ ret = TRUE;
- videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
- "GstVideoflip", &videoflip_info, 0);
+beach:
+ return ret;
+}
+
+static GstCaps *
+gst_videoflip_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps)
+{
+ GstVideoflip *videoflip;
+ GstCaps *ret;
+ gint width, height, i;
+
+ videoflip = GST_VIDEOFLIP (trans);
+
+ ret = gst_caps_copy (caps);
+
+ for (i = 0; i < gst_caps_get_size (ret); i++) {
+ GstStructure *structure = gst_caps_get_structure (ret, i);
+
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+
+ switch (videoflip->method) {
+ case GST_VIDEOFLIP_METHOD_90R:
+ case GST_VIDEOFLIP_METHOD_90L:
+ case GST_VIDEOFLIP_METHOD_TRANS:
+ case GST_VIDEOFLIP_METHOD_OTHER:
+ gst_structure_set (structure, "width", G_TYPE_INT, height,
+ "height", G_TYPE_INT, width, NULL);
+ break;
+ case GST_VIDEOFLIP_METHOD_IDENTITY:
+ case GST_VIDEOFLIP_METHOD_180:
+ case GST_VIDEOFLIP_METHOD_HORIZ:
+ case GST_VIDEOFLIP_METHOD_VERT:
+ gst_structure_set (structure, "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height, NULL);
+ break;
+ default:
+ g_assert_not_reached ();
+ break;
+ }
+ }
}
- return videoflip_type;
+
+ GST_DEBUG_OBJECT (videoflip, "transformed %" GST_PTR_FORMAT " to %"
+ GST_PTR_FORMAT, caps, ret);
+
+ return ret;
}
-static GstVideofilterFormat gst_videoflip_formats[] = {
- /* planar */
- {"YV12", 12, gst_videoflip_planar411,},
- {"I420", 12, gst_videoflip_planar411,},
- {"IYUV", 12, gst_videoflip_planar411,},
-};
+/* Useful macros */
+#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
+#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
+#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)
-static void
-gst_videoflip_base_init (gpointer g_class)
+#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
+#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
+#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
+
+static gboolean
+gst_videoflip_get_unit_size (GstBaseTransform * btrans, GstCaps * caps,
+ guint * size)
{
- static GstElementDetails videoflip_details =
- GST_ELEMENT_DETAILS ("Video Flipper",
- "Filter/Effect/Video",
- "Flips and rotates video",
- "David Schleef <ds@schleef.org>");
- GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
- GstVideofilterClass *videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
- int i;
+ GstVideoflip *videoflip;
+ GstStructure *structure;
+ gboolean ret = FALSE;
+ gint width, height;
- gst_element_class_set_details (element_class, &videoflip_details);
+ videoflip = GST_VIDEOFLIP (btrans);
+
+ structure = gst_caps_get_structure (caps, 0);
- for (i = 0; i < G_N_ELEMENTS (gst_videoflip_formats); i++) {
- gst_videofilter_class_add_format (videofilter_class,
- gst_videoflip_formats + i);
+ if (gst_structure_get_int (structure, "width", &width) &&
+ gst_structure_get_int (structure, "height", &height)) {
+ *size = GST_VIDEO_I420_SIZE (width, height);
+ ret = TRUE;
+ GST_DEBUG_OBJECT (videoflip, "our frame size is %d bytes (%dx%d)", *size,
+ width, height);
}
- gst_videofilter_class_add_pad_templates (GST_VIDEOFILTER_CLASS (g_class));
+ return ret;
}
-static void
-gst_videoflip_class_init (gpointer g_class, gpointer class_data)
+static GstFlowReturn
+gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
+ unsigned char *src, int sw, int sh, int dw, int dh)
{
- GObjectClass *gobject_class;
- GstVideofilterClass *videofilter_class;
+ GstFlowReturn ret = GST_FLOW_OK;
+ int x, y;
+
+ switch (videoflip->method) {
+ case GST_VIDEOFLIP_METHOD_90R:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[(sh - 1 - x) * sw + y];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_90L:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[x * sw + (sw - 1 - y)];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_180:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[(sh - 1 - y) * sw + (sw - 1 - x)];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_HORIZ:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[y * sw + (sw - 1 - x)];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_VERT:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[(sh - 1 - y) * sw + x];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_TRANS:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[x * sw + y];
+ }
+ }
+ break;
+ case GST_VIDEOFLIP_METHOD_OTHER:
+ for (y = 0; y < dh; y++) {
+ for (x = 0; x < dw; x++) {
+ dest[y * dw + x] = src[(sh - 1 - x) * sw + (sw - 1 - y)];
+ }
+ }
+ break;
+ default:
+ ret = GST_FLOW_ERROR;
+ break;
+ }
- gobject_class = G_OBJECT_CLASS (g_class);
- videofilter_class = GST_VIDEOFILTER_CLASS (g_class);
+ return ret;
+}
- gobject_class->set_property = gst_videoflip_set_property;
- gobject_class->get_property = gst_videoflip_get_property;
+static GstFlowReturn
+gst_videoflip_transform (GstBaseTransform * trans, GstBuffer * in,
+ GstBuffer * out)
+{
+ GstVideoflip *videoflip;
+  guint8 *dest, *src;
+ int sw, sh, dw, dh;
+ GstFlowReturn ret = GST_FLOW_OK;
- g_object_class_install_property (gobject_class, ARG_METHOD,
- g_param_spec_enum ("method", "method", "method",
- GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
- G_PARAM_READWRITE));
+ videoflip = GST_VIDEOFLIP (trans);
+
+ gst_buffer_stamp (out, in);
+
+ src = GST_BUFFER_DATA (in);
+ dest = GST_BUFFER_DATA (out);
+ sw = videoflip->from_width;
+ sh = videoflip->from_height;
+ dw = videoflip->to_width;
+ dh = videoflip->to_height;
+
+  GST_LOG_OBJECT (videoflip, "flipping planar 4:2:0 frame %dx%d to %dx%d",
+      sw, sh, dw, dh);
+
+ ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+
+ src += sw * sh;
+ dest += dw * dh;
+
+ dh = dh >> 1;
+ dw = dw >> 1;
+ sh = sh >> 1;
+ sw = sw >> 1;
- videofilter_class->setup = gst_videoflip_setup;
+ ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+ if (ret != GST_FLOW_OK)
+ goto beach;
+
+ src += sw * sh;
+ dest += dw * dh;
+
+ ret = gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+
+beach:
+ return ret;
}
-static void
-gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
+static gboolean
+gst_videoflip_handle_src_event (GstPad * pad, GstEvent * event)
{
- GstVideoflip *videoflip = GST_VIDEOFLIP (instance);
- GstVideofilter *videofilter;
+ GstVideoflip *vf;
+ gboolean ret;
+ gdouble x, y;
+ GstStructure *structure;
+
+ vf = GST_VIDEOFLIP (gst_pad_get_parent (pad));
+
+ GST_DEBUG_OBJECT (vf, "handling %s event", GST_EVENT_TYPE_NAME (event));
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_NAVIGATION:
+ event =
+ GST_EVENT (gst_mini_object_make_writable (GST_MINI_OBJECT (event)));
+
+ structure = (GstStructure *) gst_event_get_structure (event);
+ if (gst_structure_get_double (structure, "pointer_x", &x) &&
+ gst_structure_get_double (structure, "pointer_y", &y)) {
+        gdouble new_x = x, new_y = y;
+
+        /* map the pointer position back onto the upstream (pre-flip)
+         * geometry; both coordinates are computed from the original x/y
+         * so that neither assignment clobbers the other */
+        switch (vf->method) {
+          case GST_VIDEOFLIP_METHOD_90R:
+          case GST_VIDEOFLIP_METHOD_OTHER:
+            new_x = y;
+            new_y = vf->to_width - x;
+            break;
+          case GST_VIDEOFLIP_METHOD_90L:
+          case GST_VIDEOFLIP_METHOD_TRANS:
+            new_x = vf->to_height - y;
+            new_y = x;
+            break;
+          case GST_VIDEOFLIP_METHOD_180:
+            new_x = vf->to_width - x;
+            new_y = vf->to_height - y;
+            break;
+          case GST_VIDEOFLIP_METHOD_HORIZ:
+            new_x = vf->to_width - x;
+            break;
+          case GST_VIDEOFLIP_METHOD_VERT:
+            new_y = vf->to_height - y;
+            break;
+          default:
+            break;
+        }
+        gst_structure_set (structure, "pointer_x", G_TYPE_DOUBLE, new_x,
+            "pointer_y", G_TYPE_DOUBLE, new_y, NULL);
+ }
+ break;
+ default:
+ break;
+ }
- GST_DEBUG_OBJECT (videoflip, "gst_videoflip_init");
+ ret = gst_pad_event_default (pad, event);
- videofilter = GST_VIDEOFILTER (videoflip);
+ gst_object_unref (vf);
- /* do stuff */
+ return ret;
}
static void
@@ -186,16 +417,25 @@ gst_videoflip_set_property (GObject * object, guint prop_id,
videoflip = GST_VIDEOFLIP (object);
videofilter = GST_VIDEOFILTER (object);
- GST_DEBUG_OBJECT (videoflip, "gst_videoflip_set_property");
switch (prop_id) {
case ARG_METHOD:
- videoflip->method = g_value_get_enum (value);
- if (videofilter->inited) {
- GST_DEBUG_OBJECT (videoflip, "setting up videoflip again");
- gst_videofilter_setup (videofilter);
+ {
+ GstVideoflipMethod method;
+
+ method = g_value_get_enum (value);
+ if (method != videoflip->method) {
+ GstBaseTransform *btrans = GST_BASE_TRANSFORM (videoflip);
+
+ g_mutex_lock (btrans->transform_lock);
+ gst_pad_set_caps (btrans->sinkpad, NULL);
+ gst_pad_set_caps (btrans->srcpad, NULL);
+ g_mutex_unlock (btrans->transform_lock);
+ videoflip->method = method;
}
+ }
break;
default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
@@ -219,167 +459,94 @@ gst_videoflip_get_property (GObject * object, guint prop_id, GValue * value,
}
}
-static gboolean
-plugin_init (GstPlugin * plugin)
+static void
+gst_videoflip_base_init (gpointer g_class)
{
- GST_DEBUG_CATEGORY_INIT (gst_videoflip_debug, "videoflip", 0, "videoflip");
-
- return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
- GST_TYPE_VIDEOFLIP);
-}
-
-GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
- GST_VERSION_MINOR,
- "videoflip",
- "Flips and rotates video",
- plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
-static void gst_videoflip_flip (GstVideoflip * videoflip,
- unsigned char *dest, unsigned char *src, int sw, int sh, int dw, int dh);
+ gst_element_class_set_details (element_class, &videoflip_details);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_videoflip_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_videoflip_src_template));
+}
static void
-gst_videoflip_setup (GstVideofilter * videofilter)
+gst_videoflip_class_init (gpointer klass, gpointer class_data)
{
- int from_width, from_height;
- GstVideoflip *videoflip;
-
- videoflip = GST_VIDEOFLIP (videofilter);
- GST_DEBUG_OBJECT (videoflip, "gst_videoflip_setup");
+ GObjectClass *gobject_class;
+ GstBaseTransformClass *trans_class;
- from_width = gst_videofilter_get_input_width (videofilter);
- from_height = gst_videofilter_get_input_height (videofilter);
+ gobject_class = (GObjectClass *) klass;
+ trans_class = (GstBaseTransformClass *) klass;
- if (from_width == 0 || from_height == 0) {
- return;
- }
+ parent_class = g_type_class_peek_parent (klass);
- switch (videoflip->method) {
- case GST_VIDEOFLIP_METHOD_90R:
- case GST_VIDEOFLIP_METHOD_90L:
- case GST_VIDEOFLIP_METHOD_TRANS:
- case GST_VIDEOFLIP_METHOD_OTHER:
- gst_videofilter_set_output_size (videofilter, from_height, from_width);
- break;
- case GST_VIDEOFLIP_METHOD_IDENTITY:
- case GST_VIDEOFLIP_METHOD_180:
- case GST_VIDEOFLIP_METHOD_HORIZ:
- case GST_VIDEOFLIP_METHOD_VERT:
- gst_videofilter_set_output_size (videofilter, from_width, from_height);
- break;
- default:
- g_assert_not_reached ();
- break;
- }
+ gobject_class->set_property = gst_videoflip_set_property;
+ gobject_class->get_property = gst_videoflip_get_property;
- GST_DEBUG_OBJECT (videoflip, "format=%p \"%s\" from %dx%d to %dx%d",
- videofilter->format, videofilter->format->fourcc,
- from_width, from_height, videofilter->to_width, videofilter->to_height);
+ g_object_class_install_property (gobject_class, ARG_METHOD,
+ g_param_spec_enum ("method", "method", "method",
+ GST_TYPE_VIDEOFLIP_METHOD, GST_VIDEOFLIP_METHOD_90R,
+ G_PARAM_READWRITE));
- if (videoflip->method == GST_VIDEOFLIP_METHOD_IDENTITY) {
- GST_DEBUG_OBJECT (videoflip, "videoflip: using passthru");
- videofilter->passthru = TRUE;
- } else {
- videofilter->passthru = FALSE;
- }
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_videoflip_transform_caps);
+ trans_class->set_caps = GST_DEBUG_FUNCPTR (gst_videoflip_set_caps);
+ trans_class->get_unit_size = GST_DEBUG_FUNCPTR (gst_videoflip_get_unit_size);
+ trans_class->transform = GST_DEBUG_FUNCPTR (gst_videoflip_transform);
}
static void
-gst_videoflip_planar411 (GstVideofilter * videofilter, void *dest, void *src)
+gst_videoflip_init (GTypeInstance * instance, gpointer g_class)
{
- GstVideoflip *videoflip;
- int sw;
- int sh;
- int dw;
- int dh;
-
- g_return_if_fail (GST_IS_VIDEOFLIP (videofilter));
- videoflip = GST_VIDEOFLIP (videofilter);
-
- sw = videofilter->from_width;
- sh = videofilter->from_height;
- dw = videofilter->to_width;
- dh = videofilter->to_height;
-
- GST_LOG_OBJECT (videoflip, "videoflip: scaling planar 4:1:1 %dx%d to %dx%d",
- sw, sh, dw, dh);
-
- gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+ GstVideoflip *videoflip = GST_VIDEOFLIP (instance);
+ GstBaseTransform *btrans = GST_BASE_TRANSFORM (instance);
- src += sw * sh;
- dest += dw * dh;
+ GST_DEBUG_OBJECT (videoflip, "gst_videoflip_init");
- dh = dh >> 1;
- dw = dw >> 1;
- sh = sh >> 1;
- sw = sw >> 1;
+ videoflip->method = GST_VIDEOFLIP_METHOD_90R;
- gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+ gst_pad_set_event_function (btrans->srcpad,
+ GST_DEBUG_FUNCPTR (gst_videoflip_handle_src_event));
+}
- src += sw * sh;
- dest += dw * dh;
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ GST_DEBUG_CATEGORY_INIT (videoflip_debug, "videoflip", 0, "videoflip");
- gst_videoflip_flip (videoflip, dest, src, sw, sh, dw, dh);
+ return gst_element_register (plugin, "videoflip", GST_RANK_NONE,
+ GST_TYPE_VIDEOFLIP);
}
-static void
-gst_videoflip_flip (GstVideoflip * videoflip, unsigned char *dest,
- unsigned char *src, int sw, int sh, int dw, int dh)
+GType
+gst_videoflip_get_type (void)
{
- int x, y;
+ static GType videoflip_type = 0;
- switch (videoflip->method) {
- case GST_VIDEOFLIP_METHOD_90R:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[(sh - 1 - x) * sw + y];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_90L:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[x * sw + (sw - 1 - y)];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_180:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[(sh - 1 - y) * sw + (sw - 1 - x)];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_HORIZ:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[y * sw + (sw - 1 - x)];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_VERT:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[(sh - 1 - y) * sw + x];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_TRANS:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[x * sw + y];
- }
- }
- break;
- case GST_VIDEOFLIP_METHOD_OTHER:
- for (y = 0; y < dh; y++) {
- for (x = 0; x < dw; x++) {
- dest[y * dw + x] = src[(sh - 1 - x) * sw + (sw - 1 - y)];
- }
- }
- break;
- default:
- /* FIXME */
- break;
+ if (!videoflip_type) {
+ static const GTypeInfo videoflip_info = {
+ sizeof (GstVideoflipClass),
+ gst_videoflip_base_init,
+ NULL,
+ gst_videoflip_class_init,
+ NULL,
+ NULL,
+ sizeof (GstVideoflip),
+ 0,
+ gst_videoflip_init,
+ };
+
+ videoflip_type = g_type_register_static (GST_TYPE_VIDEOFILTER,
+ "GstVideoflip", &videoflip_info, 0);
}
+ return videoflip_type;
}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "videoflip",
+ "Flips and rotates video",
+ plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
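
Worth noting: gst_videoflip_get_unit_size() above derives the buffer size from the I420 layout macros rather than a bare width * height * 12 / 8. The following standalone sketch (not part of the patch) reproduces those macros together with the usual GST_ROUND_UP_* definitions and prints the plane layout for a 320x240 frame: Y plane at offset 0, U at 76800, V at 96000, 115200 bytes in total.

#include <stdio.h>

/* the usual GStreamer round-up helpers, reproduced here so the example
 * compiles without any GStreamer headers */
#define GST_ROUND_UP_2(num)  (((num)+1)&~1)
#define GST_ROUND_UP_4(num)  (((num)+3)&~3)
#define GST_ROUND_UP_8(num)  (((num)+7)&~7)

/* the I420 layout macros from gstvideoflip.c above */
#define GST_VIDEO_I420_Y_ROWSTRIDE(width) (GST_ROUND_UP_4(width))
#define GST_VIDEO_I420_U_ROWSTRIDE(width) (GST_ROUND_UP_8(width)/2)
#define GST_VIDEO_I420_V_ROWSTRIDE(width) ((GST_ROUND_UP_8(GST_VIDEO_I420_Y_ROWSTRIDE(width)))/2)

#define GST_VIDEO_I420_Y_OFFSET(w,h) (0)
#define GST_VIDEO_I420_U_OFFSET(w,h) (GST_VIDEO_I420_Y_OFFSET(w,h)+(GST_VIDEO_I420_Y_ROWSTRIDE(w)*GST_ROUND_UP_2(h)))
#define GST_VIDEO_I420_V_OFFSET(w,h) (GST_VIDEO_I420_U_OFFSET(w,h)+(GST_VIDEO_I420_U_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))

#define GST_VIDEO_I420_SIZE(w,h) (GST_VIDEO_I420_V_OFFSET(w,h)+(GST_VIDEO_I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))

int
main (void)
{
  int w = 320, h = 240;

  /* for 320x240 this prints: U offset 76800, V offset 96000, size 115200 */
  printf ("U offset %d, V offset %d, size %d\n",
      GST_VIDEO_I420_U_OFFSET (w, h), GST_VIDEO_I420_V_OFFSET (w, h),
      GST_VIDEO_I420_SIZE (w, h));

  return 0;
}
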
diff --git a/gst/videofilter/gstvideoflip.h b/gst/videofilter/gstvideoflip.h
index 83b37718..d21ed4ac 100644
--- a/gst/videofilter/gstvideoflip.h
+++ b/gst/videofilter/gstvideoflip.h
@@ -17,16 +17,11 @@
* Boston, MA 02111-1307, USA.
*/
-
#ifndef __GST_VIDEOFLIP_H__
#define __GST_VIDEOFLIP_H__
-
-#include <gst/gst.h>
-
#include "gstvideofilter.h"
-
G_BEGIN_DECLS
typedef enum {
@@ -37,7 +32,7 @@ typedef enum {
GST_VIDEOFLIP_METHOD_HORIZ,
GST_VIDEOFLIP_METHOD_VERT,
GST_VIDEOFLIP_METHOD_TRANS,
- GST_VIDEOFLIP_METHOD_OTHER,
+ GST_VIDEOFLIP_METHOD_OTHER
} GstVideoflipMethod;
#define GST_TYPE_VIDEOFLIP \
@@ -56,7 +51,10 @@ typedef struct _GstVideoflipClass GstVideoflipClass;
struct _GstVideoflip {
GstVideofilter videofilter;
-
+
+ gint from_width, from_height;
+ gint to_width, to_height;
+
GstVideoflipMethod method;
};
@@ -69,4 +67,3 @@ GType gst_videoflip_get_type(void);
G_END_DECLS
#endif /* __GST_VIDEOFLIP_H__ */
-
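
Finally, a short usage sketch (ordinary GStreamer application code, not part of the patch; error handling is minimal and the videoflip plugin is assumed to be installed): the "method" property added above is an enum, so it can be selected by the nicks registered in gst_videoflip_method_get_type(), e.g. "clockwise".

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *flip;

  gst_init (&argc, &argv);

  flip = gst_element_factory_make ("videoflip", NULL);
  if (flip == NULL) {
    g_printerr ("videoflip element not available\n");
    return 1;
  }

  /* select "Rotate clockwise 90 degrees" via its enum nick */
  gst_util_set_object_arg (G_OBJECT (flip), "method", "clockwise");

  /* ...link the element into a pipeline as usual... */

  gst_object_unref (flip);
  return 0;
}
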