[longomatch] Use a custom non linear editor instead of gnonlin
- From: Andoni Morales Alastruey <amorales src gnome org>
- To: commits-list gnome org
- Cc:
- Subject: [longomatch] Use a custom non linear editor instead of gnonlin
- Date: Sun, 29 Dec 2013 17:24:56 +0000 (UTC)
commit e45a6c4e42e70d74ad5c70e3abfaa87765fb0699
Author: Andoni Morales Alastruey <ylatuya gmail com>
Date: Sun Dec 29 13:01:51 2013 +0100
Use a custom non linear editor instead of gnonlin
libcesarplayer/Makefile.am | 2 +
libcesarplayer/gst-nle-source.c | 704 +++++++++++++++++++++++++++++++++++++
libcesarplayer/gst-nle-source.h | 114 ++++++
libcesarplayer/gst-video-editor.c | 402 ++++-----------------
libcesarplayer/video-utils.c | 7 +-
5 files changed, 899 insertions(+), 330 deletions(-)
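
As a reading aid for the patch below, here is a minimal usage sketch of the new GstNleSource API (GStreamer 0.10 era), inferred from the declarations added in gst-nle-source.h; the surrounding pipeline and the clip path are illustrative placeholders, LongoMatch itself wires the source into its encoder bins in gst-video-editor.c:

#include <gst/gst.h>
#include "gst-nle-source.h"

static void
pad_added_cb (GstElement * src, GstPad * pad, GstElement * sink)
{
  /* the "video" ghost pad only appears once the first item prerolls */
  GstPad *sinkpad = gst_element_get_static_pad (sink, "sink");
  if (!GST_PAD_IS_LINKED (sinkpad))
    gst_pad_link (pad, sinkpad);
  gst_object_unref (sinkpad);
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *sink;
  GstNleSource *nlesrc;
  GMainLoop *loop;

  gst_init (&argc, &argv);

  pipeline = gst_pipeline_new ("render");
  sink = gst_element_factory_make ("fakesink", NULL);
  nlesrc = gst_nle_source_new ();

  /* 720p at 25/1 fps, overlay titles, video only for this sketch */
  gst_nle_source_configure (nlesrc, 1280, 720, 25, 1, TRUE, FALSE);
  /* render seconds 10-20 of a (hypothetical) clip at normal rate,
   * not a still picture */
  gst_nle_source_add_item (nlesrc, "/tmp/clip.mp4", "Goal",
      10 * GST_SECOND, 20 * GST_SECOND, 1.0, FALSE);

  gst_bin_add_many (GST_BIN (pipeline), GST_ELEMENT (nlesrc), sink, NULL);
  g_signal_connect (nlesrc, "pad-added", G_CALLBACK (pad_added_cb), sink);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);
  return 0;
}
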
---
diff --git a/libcesarplayer/Makefile.am b/libcesarplayer/Makefile.am
index 48a5bda..39349fe 100644
--- a/libcesarplayer/Makefile.am
+++ b/libcesarplayer/Makefile.am
@@ -43,6 +43,8 @@ libcesarplayer_la_SOURCES = \
gst-video-editor.h\
gst-video-encoder.c\
gst-video-encoder.h\
+ gst-nle-source.c\
+ gst-nle-source.h\
video-utils.c\
video-utils.h\
macros.h
diff --git a/libcesarplayer/gst-nle-source.c b/libcesarplayer/gst-nle-source.c
new file mode 100644
index 0000000..5368a5e
--- /dev/null
+++ b/libcesarplayer/gst-nle-source.c
@@ -0,0 +1,704 @@
+/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
+/*
+* Gstreamer NLE source
+* Copyright (C) Andoni Morales Alastruey 2013 <ylatuya gmail com>
+*
+* Gstreamer NLE source is free software.
+*
+* You may redistribute it and/or modify it under the terms of the
+* GNU General Public License, as published by the Free Software
+* Foundation; either version 2 of the License, or (at your option)
+* any later version.
+*
+* Gstreamer NLE source is distributed in the hope that it will be useful,
+* but WITHOUT ANY WARRANTY; without even the implied warranty of
+* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+* See the GNU General Public License for more details.
+*
+* You should have received a copy of the GNU General Public License
+* along with this program. If not, write to:
+* The Free Software Foundation, Inc.,
+* 51 Franklin Street, Fifth Floor
+* Boston, MA 02110-1301, USA.
+*/
+
+#include <string.h>
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/app/gstappsink.h>
+#include <gst/app/gstappsrc.h>
+
+#include "video-utils.h"
+#include "gst-nle-source.h"
+
+GST_DEBUG_CATEGORY (_nlesrc_gst_debug_cat);
+#define GST_CAT_DEFAULT _nlesrc_gst_debug_cat
+
+#define CHANNELS 2
+#define DEPTH 16
+#define RATE 44100
+#define BITS_PER_SAMPLE DEPTH*CHANNELS*RATE
+#define AUDIO_CAPS_STR "audio/x-raw-int, endianness=1234, signed=true, "\
+ " width=16, depth=16, rate=44100, channels=2"
+
+static GstStaticPadTemplate video_tpl = GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS ("video/x-raw-yuv, format=(fourcc)I420, "
+ "width=[1,2160], height=[1,2160], framerate={25/1, 30/1}"));
+
+static GstStaticPadTemplate audio_tpl = GST_STATIC_PAD_TEMPLATE ("audio",
+ GST_PAD_SRC,
+ GST_PAD_SOMETIMES,
+ GST_STATIC_CAPS (AUDIO_CAPS_STR));
+
+
+typedef struct
+{
+ gchar *file_path;
+ gchar *title;
+ guint64 start;
+ guint64 stop;
+ guint64 duration;
+ gfloat rate;
+ gboolean still_picture;
+} GstNleSrcItem;
+
+static GstBinClass *parent_class = NULL;
+
+static void gst_nle_source_dispose (GObject * object);
+static GstStateChangeReturn gst_nle_source_change_state
+ (GstElement * element, GstStateChange transition);
+static void gst_nle_source_next (GstNleSource * nlesrc);
+static void gst_nle_source_next_threaded (GstNleSource * nlesrc);
+static void gst_nle_source_no_more_pads (GstElement * element,
+ GstNleSource * nlesrc);
+static void gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
+ GstNleSource * nlesrc);
+
+G_DEFINE_TYPE (GstNleSource, gst_nle_source, GST_TYPE_BIN);
+
+static GstNleSrcItem *
+gst_nle_source_item_new (const gchar * file_path, const gchar * title,
+ guint64 start, guint64 stop, gfloat rate, gboolean still_picture)
+{
+ GstNleSrcItem *item;
+
+ item = g_new0 (GstNleSrcItem, 1);
+ item->file_path = g_strdup (file_path);
+ item->title = g_strdup (title);
+ item->start = start;
+ item->stop = stop;
+ item->rate = rate;
+ item->still_picture = still_picture;
+ if (still_picture) {
+ item->rate = 1;
+ }
+ item->duration = stop - start;
+ return item;
+}
+
+static void
+gst_nle_source_item_free (GstNleSrcItem * item)
+{
+ if (item->file_path != NULL)
+ g_free (item->file_path);
+ if (item->title != NULL)
+ g_free (item->title);
+ g_free (item);
+}
+
+static void
+gst_nle_source_init (GstNleSource * nlesrc)
+{
+ nlesrc->video_pad = gst_ghost_pad_new_no_target_from_template ("video",
+ gst_static_pad_template_get (&video_tpl));
+ nlesrc->audio_pad = gst_ghost_pad_new_no_target_from_template ("audio",
+ gst_static_pad_template_get (&audio_tpl));
+ g_mutex_init (&nlesrc->stream_lock);
+}
+
+static void
+gst_nle_source_class_init (GstNleSourceClass * klass)
+{
+ GObjectClass *object_class;
+ GstElementClass *element_class;
+
+ object_class = (GObjectClass *) klass;
+ element_class = (GstElementClass *) klass;
+ parent_class = g_type_class_peek_parent (klass);
+
+ /* GObject */
+ object_class->dispose = gst_nle_source_dispose;
+
+ /* GstElement */
+ element_class->change_state = gst_nle_source_change_state;
+
+ GST_DEBUG_CATEGORY_INIT (_nlesrc_gst_debug_cat, "longomatch", 0,
+ "LongoMatch GStreamer Backend");
+}
+
+static void
+gst_nle_source_bus_message (GstBus * bus, GstMessage * message,
+ GstNleSource * nlesrc)
+{
+ switch (message->type) {
+ case GST_MESSAGE_ERROR:
+ gst_nle_source_next_threaded (nlesrc);
+ break;
+ default:
+ break;
+ }
+}
+
+static void
+gst_nle_source_dispose (GObject * object)
+{
+ GstNleSource *nlesrc = GST_NLE_SOURCE (object);
+
+ if (nlesrc->queue != NULL) {
+ g_list_free_full (nlesrc->queue, (GDestroyNotify) gst_nle_source_item_free);
+ nlesrc->queue = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->dispose (object);
+}
+
+static GstCaps *
+gst_nle_source_get_audio_caps (GstNleSource * nlesrc)
+{
+ return gst_caps_from_string (AUDIO_CAPS_STR);
+}
+
+static void
+gst_nle_source_setup (GstNleSource * nlesrc)
+{
+ GstElement *videorate, *videoscale, *colorspace, *vident;
+ GstElement *audiorate, *audioconvert, *audioresample, *aident;
+ GstElement *a_capsfilter, *v_capsfilter;
+ GstPad *v_pad, *a_pad;
+ GstCaps *v_caps, *a_caps;
+
+ nlesrc->video_appsrc = gst_element_factory_make ("appsrc", NULL);
+ videorate = gst_element_factory_make ("videorate", NULL);
+ videoscale = gst_element_factory_make ("videoscale", NULL);
+ colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
+ v_capsfilter = gst_element_factory_make ("capsfilter", NULL);
+ nlesrc->textoverlay = gst_element_factory_make ("textoverlay", NULL);
+ vident = gst_element_factory_make ("identity", NULL);
+
+ v_caps = gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, GST_STR_FOURCC ("I420"),
+ "width", G_TYPE_INT, (gint) nlesrc->width,
+ "height", G_TYPE_INT, (gint) nlesrc->height,
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
+ "framerate", GST_TYPE_FRACTION,
+ (gint) nlesrc->fps_n, (gint) nlesrc->fps_d, NULL);
+ gst_pad_set_caps (nlesrc->video_pad, v_caps);
+
+ g_object_set (nlesrc->video_appsrc, "block", TRUE, NULL);
+ g_object_set (videoscale, "add-borders", TRUE, NULL);
+ g_object_set (vident, "single-segment", TRUE, NULL);
+ g_object_set (v_capsfilter, "caps", v_caps, NULL);
+ g_object_set (nlesrc->textoverlay, "valignment", 2, "halignment", 2,
+ "auto-resize", TRUE, "wrap-mode", 0, "silent", !nlesrc->overlay_title,
+ NULL);
+
+ gst_bin_add_many (GST_BIN (nlesrc), nlesrc->video_appsrc, videorate,
+ videoscale, colorspace, v_capsfilter, nlesrc->textoverlay, vident, NULL);
+ gst_element_link_many (nlesrc->video_appsrc, videorate, videoscale,
+ colorspace, v_capsfilter, nlesrc->textoverlay, vident, NULL);
+
+ v_pad = gst_element_get_pad (vident, "src");
+ gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->video_pad), v_pad);
+
+ if (nlesrc->with_audio) {
+ nlesrc->audio_appsrc = gst_element_factory_make ("appsrc", NULL);
+ audiorate = gst_element_factory_make ("audiorate", NULL);
+ audioconvert = gst_element_factory_make ("audioconvert", NULL);
+ audioresample = gst_element_factory_make ("audioresample", NULL);
+ a_capsfilter = gst_element_factory_make ("capsfilter", NULL);
+ aident = gst_element_factory_make ("identity", NULL);
+
+ gst_bin_add_many (GST_BIN (nlesrc), nlesrc->audio_appsrc, audiorate,
+ audioconvert, audioresample, a_capsfilter, aident, NULL);
+ gst_element_link_many (nlesrc->audio_appsrc, audioresample, audioconvert,
+ audiorate, a_capsfilter, aident, NULL);
+
+ a_caps = gst_nle_source_get_audio_caps (nlesrc);
+ gst_pad_set_caps (nlesrc->audio_pad, a_caps);
+ g_object_set (a_capsfilter, "caps", a_caps, NULL);
+
+ g_object_set (nlesrc->audio_appsrc, "block", TRUE, NULL);
+ g_object_set (aident, "single-segment", TRUE, NULL);
+ a_pad = gst_element_get_pad (aident, "src");
+ gst_ghost_pad_set_target (GST_GHOST_PAD (nlesrc->audio_pad), a_pad);
+
+ }
+ nlesrc->index = -1;
+ nlesrc->accu_time = 0;
+ nlesrc->video_pad_added = FALSE;
+ nlesrc->audio_pad_added = FALSE;
+}
+
+static void
+gst_nle_source_apply_title_size (GstNleSource * nlesrc, gint size)
+{
+ gchar *font;
+
+ font = g_strdup_printf ("sans bold %d", size);
+ g_object_set (G_OBJECT (nlesrc->textoverlay), "font-desc", font, NULL);
+ g_free (font);
+}
+
+static void
+gst_nle_source_update_overlay_title (GstNleSource * nlesrc)
+{
+ glong length;
+ GstNleSrcItem *item;
+
+ item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
+ g_object_set (G_OBJECT (nlesrc->textoverlay), "text", item->title, NULL);
+
+ length = g_utf8_strlen (item->title, -1);
+ if (length * nlesrc->title_size > nlesrc->width) {
+ gst_nle_source_apply_title_size (nlesrc, nlesrc->width / length - 1);
+ } else {
+ gst_nle_source_apply_title_size (nlesrc, nlesrc->title_size);
+ }
+}
+
+static GstFlowReturn
+gst_nle_source_push_buffer (GstNleSource * nlesrc, GstBuffer * buf,
+ gboolean is_audio)
+{
+ GstAppSrc *appsrc;
+ gboolean push_buf;
+ guint64 buf_ts, buf_rel_ts, last_ts;
+ GstNleSrcItem *item;
+
+ item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
+ buf_ts = GST_BUFFER_TIMESTAMP (buf);
+ buf_rel_ts = buf_ts - item->start;
+
+ g_mutex_lock (&nlesrc->stream_lock);
+
+ if (is_audio) {
+ push_buf = nlesrc->audio_seek_done;
+ last_ts = nlesrc->audio_ts;
+ nlesrc->audio_ts = buf_ts;
+ appsrc = GST_APP_SRC (nlesrc->audio_appsrc);
+ } else {
+ push_buf = nlesrc->video_seek_done;
+ last_ts = nlesrc->video_ts;
+ nlesrc->video_ts = buf_ts;
+ appsrc = GST_APP_SRC (nlesrc->video_appsrc);
+ }
+
+ if (push_buf && GST_BUFFER_TIMESTAMP (buf) >= last_ts) {
+ /* Retimestamps buffer */
+ guint64 new_ts = nlesrc->start_ts + buf_rel_ts / item->rate;
+
+ GST_BUFFER_TIMESTAMP (buf) = new_ts;
+ GST_LOG_OBJECT (nlesrc, "Pushing %s buffer with ts: %" GST_TIME_FORMAT
+ " dur:%" GST_TIME_FORMAT " orig:%" GST_TIME_FORMAT,
+ is_audio ? "audio" : "video", GST_TIME_ARGS (new_ts),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_TIME_ARGS (buf_ts));
+ if (GST_BUFFER_DURATION_IS_VALID (buf)) {
+ new_ts += GST_BUFFER_DURATION (buf);
+ }
+ if (new_ts >= nlesrc->accu_time) {
+ nlesrc->accu_time = new_ts;
+ }
+ /* We need to unlock before pushing since push_buffer can block */
+ g_mutex_unlock (&nlesrc->stream_lock);
+ return gst_app_src_push_buffer (appsrc, buf);
+ } else {
+ GST_LOG_OBJECT (nlesrc, "Discard %s buffer with ts: %" GST_TIME_FORMAT,
+ is_audio ? "audio" : "video", GST_TIME_ARGS (buf_ts));
+ gst_buffer_unref (buf);
+ g_mutex_unlock (&nlesrc->stream_lock);
+ return GST_FLOW_OK;
+ }
+}
+
+static void
+gst_nle_source_no_more_pads (GstElement * element, GstNleSource * nlesrc)
+{
+ /* If the input stream doesn't contain audio or it's a still picture we fill
+ * the gap with a dummy audio buffer with silence */
+ if (nlesrc->with_audio && !nlesrc->audio_linked) {
+ GstBuffer *buf;
+ GstNleSrcItem *item;
+ GstCaps *caps;
+ guint64 duration;
+ guint bps = BITS_PER_SAMPLE / 8;
+
+ GST_INFO_OBJECT (nlesrc, "Pushing dummy audio buffer");
+
+ nlesrc->audio_seek_done = TRUE;
+
+ if (!nlesrc->audio_pad_added) {
+ gst_pad_set_active (nlesrc->audio_pad, TRUE);
+ gst_element_add_pad (GST_ELEMENT (nlesrc), nlesrc->audio_pad);
+ nlesrc->audio_pad_added = TRUE;
+ }
+ item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
+ duration = item->duration / item->rate;
+ buf = gst_buffer_new_and_alloc (bps * duration / GST_SECOND);
+ /* Generate silence */
+ memset (GST_BUFFER_DATA (buf), 0, GST_BUFFER_SIZE (buf));
+ GST_BUFFER_TIMESTAMP (buf) = item->start;
+ caps = gst_nle_source_get_audio_caps (nlesrc);
+ gst_buffer_set_caps (buf, caps);
+ gst_caps_unref (caps);
+
+ gst_nle_source_push_buffer (nlesrc, buf, TRUE);
+ }
+}
+
+static GstFlowReturn
+gst_nle_source_on_preroll_buffer (GstAppSink * appsink, gpointer data)
+{
+ gst_buffer_unref (gst_app_sink_pull_preroll (appsink));
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_nle_source_on_video_buffer (GstAppSink * appsink, gpointer data)
+{
+ GstNleSrcItem *item;
+ GstNleSource *nlesrc;
+ GstBuffer *buf;
+
+ nlesrc = GST_NLE_SOURCE (data);
+ item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
+
+ buf = gst_app_sink_pull_buffer (appsink);
+
+ if (item->still_picture) {
+ GstBuffer *end_buf;
+
+ end_buf = gst_buffer_copy (buf);
+
+ /* Push the first buffer and the last two, letting videorate fill the gap */
+ GST_BUFFER_TIMESTAMP (buf) = item->start;
+ GST_BUFFER_DURATION (buf) = 40 * GST_MSECOND;
+ nlesrc->video_seek_done = TRUE;
+ gst_nle_source_push_buffer (nlesrc, buf, FALSE);
+
+ buf = gst_buffer_copy (end_buf);
+ GST_BUFFER_TIMESTAMP (buf) = item->stop - 80 * GST_MSECOND;
+ GST_BUFFER_DURATION (buf) = 40 * GST_MSECOND;
+ gst_nle_source_push_buffer (nlesrc, buf, FALSE);
+
+ GST_BUFFER_TIMESTAMP (end_buf) = item->stop - 40 * GST_MSECOND;
+ GST_BUFFER_DURATION (end_buf) = 40 * GST_MSECOND;
+ buf = end_buf;
+ }
+ return gst_nle_source_push_buffer (nlesrc, buf, FALSE);
+}
+
+static GstFlowReturn
+gst_nle_source_on_audio_buffer (GstAppSink * appsink, gpointer data)
+{
+ GstNleSource *nlesrc = GST_NLE_SOURCE (data);
+
+ return gst_nle_source_push_buffer (nlesrc,
+ gst_app_sink_pull_buffer (appsink), TRUE);
+}
+
+static void
+gst_nle_source_check_eos (GstNleSource * nlesrc)
+{
+ g_mutex_lock (&nlesrc->stream_lock);
+ if (nlesrc->video_eos && nlesrc->audio_eos) {
+ nlesrc->audio_eos = FALSE;
+ nlesrc->video_eos = FALSE;
+ GST_DEBUG_OBJECT (nlesrc, "All pads are EOS");
+ gst_nle_source_next_threaded (nlesrc);
+ }
+ g_mutex_unlock (&nlesrc->stream_lock);
+}
+
+static void
+gst_nle_source_on_video_eos (GstAppSink * appsink, gpointer data)
+{
+ GstNleSource *nlesrc = GST_NLE_SOURCE (data);
+
+ GST_DEBUG_OBJECT (nlesrc, "Video pad is EOS");
+ nlesrc->video_eos = TRUE;
+ gst_nle_source_check_eos (nlesrc);
+}
+
+static void
+gst_nle_source_on_audio_eos (GstAppSink * appsink, gpointer data)
+{
+ GstNleSource *nlesrc = GST_NLE_SOURCE (data);
+
+ GST_DEBUG_OBJECT (nlesrc, "Audio pad is EOS");
+ nlesrc->audio_eos = TRUE;
+ gst_nle_source_check_eos (nlesrc);
+}
+
+static gboolean
+gst_nle_source_video_pad_probe_cb (GstPad * pad, GstEvent * event,
+ GstNleSource * nlesrc)
+{
+ if (event->type == GST_EVENT_NEWSEGMENT) {
+ g_mutex_lock (&nlesrc->stream_lock);
+ if (!nlesrc->video_seek_done && nlesrc->seek_done) {
+ GST_DEBUG_OBJECT (nlesrc, "NEWSEGMENT on the video pad");
+ nlesrc->video_seek_done = TRUE;
+ gst_nle_source_update_overlay_title (nlesrc);
+ }
+ g_mutex_unlock (&nlesrc->stream_lock);
+ }
+ return TRUE;
+}
+
+static gboolean
+gst_nle_source_audio_pad_probe_cb (GstPad * pad, GstEvent * event,
+ GstNleSource * nlesrc)
+{
+ if (event->type == GST_EVENT_NEWSEGMENT) {
+ g_mutex_lock (&nlesrc->stream_lock);
+ if (!nlesrc->audio_seek_done && nlesrc->seek_done) {
+ GST_DEBUG_OBJECT (nlesrc, "NEWSEGMENT on the audio pad");
+ nlesrc->audio_seek_done = TRUE;
+ }
+ g_mutex_unlock (&nlesrc->stream_lock);
+ }
+ return TRUE;
+}
+
+static void
+gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
+ GstNleSource * nlesrc)
+{
+ GstCaps *caps;
+ const GstStructure *s;
+ const gchar *mime;
+ GstElement *appsink = NULL;
+ GstPad *sink_pad;
+ GstAppSinkCallbacks appsink_cbs;
+
+ caps = gst_pad_get_caps_reffed (pad);
+ s = gst_caps_get_structure (caps, 0);
+ mime = gst_structure_get_name (s);
+ GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);
+
+ if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
+ appsink = gst_element_factory_make ("appsink", NULL);
+ memset (&appsink_cbs, 0, sizeof (appsink_cbs));
+ appsink_cbs.eos = gst_nle_source_on_video_eos;
+ appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
+ appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
+ nlesrc->video_linked = TRUE;
+ if (!nlesrc->video_pad_added) {
+ gst_pad_set_active (nlesrc->video_pad, TRUE);
+ gst_element_add_pad (GST_ELEMENT (nlesrc), nlesrc->video_pad);
+ nlesrc->video_pad_added = TRUE;
+ }
+ gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
+ (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
+ nlesrc->video_eos = FALSE;
+ } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
+ && !nlesrc->audio_linked) {
+ appsink = gst_element_factory_make ("appsink", NULL);
+ memset (&appsink_cbs, 0, sizeof (appsink_cbs));
+ appsink_cbs.eos = gst_nle_source_on_audio_eos;
+ appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
+ appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
+ nlesrc->audio_linked = TRUE;
+ if (!nlesrc->audio_pad_added) {
+ gst_pad_set_active (nlesrc->audio_pad, TRUE);
+ gst_element_add_pad (GST_ELEMENT (nlesrc), nlesrc->audio_pad);
+ nlesrc->audio_pad_added = TRUE;
+ }
+ gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
+ (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
+ nlesrc->audio_eos = FALSE;
+ }
+ if (appsink != NULL) {
+ g_object_set (appsink, "sync", FALSE, NULL);
+ gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
+ NULL);
+ gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
+ sink_pad = gst_element_get_static_pad (appsink, "sink");
+ gst_pad_link (pad, sink_pad);
+ gst_element_sync_state_with_parent (appsink);
+ gst_object_unref (sink_pad);
+ }
+}
+
+static void
+gst_nle_source_push_eos (GstNleSource * nlesrc)
+{
+ GST_INFO_OBJECT (nlesrc, "All items rendered, pushing eos");
+
+ if (nlesrc->video_appsrc) {
+ gst_app_src_end_of_stream (GST_APP_SRC (nlesrc->video_appsrc));
+ }
+ if (nlesrc->audio_appsrc) {
+ gst_app_src_end_of_stream (GST_APP_SRC (nlesrc->audio_appsrc));
+ }
+}
+
+static void
+gst_nle_source_next_threaded (GstNleSource * nlesrc)
+{
+ g_thread_new ("next", (GThreadFunc) gst_nle_source_next, nlesrc);
+}
+
+static void
+gst_nle_source_next (GstNleSource * nlesrc)
+{
+ GstNleSrcItem *item;
+ GstStateChangeReturn ret;
+ GstElement *uridecodebin;
+ GstBus *bus;
+ GstState state;
+
+ nlesrc->index++;
+
+ if (nlesrc->index >= g_list_length (nlesrc->queue)) {
+ gst_nle_source_push_eos (nlesrc);
+ return;
+ }
+
+ if (nlesrc->decoder != NULL) {
+ gst_element_set_state (GST_ELEMENT (nlesrc->decoder), GST_STATE_NULL);
+ gst_element_get_state (GST_ELEMENT (nlesrc->decoder), NULL, NULL, 0);
+ gst_object_unref (nlesrc->decoder);
+ }
+
+ nlesrc->decoder = gst_pipeline_new ("decoder");
+ uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
+ gst_bin_add (GST_BIN (nlesrc->decoder), uridecodebin);
+
+ g_signal_connect (uridecodebin, "pad-added",
+ G_CALLBACK (gst_nle_source_pad_added_cb), nlesrc);
+ g_signal_connect (uridecodebin, "no-more-pads",
+ G_CALLBACK (gst_nle_source_no_more_pads), nlesrc);
+
+ bus = GST_ELEMENT_BUS (nlesrc->decoder);
+ gst_bus_add_signal_watch (bus);
+ g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
+ nlesrc);
+ item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);
+
+ GST_INFO_OBJECT (nlesrc, "Starting next item with uri:%s", item->file_path);
+ GST_INFO_OBJECT (nlesrc, "start:%" GST_TIME_FORMAT " stop:%"
+ GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (item->start),
+ GST_TIME_ARGS (item->stop), item->rate);
+
+ g_object_set (uridecodebin, "uri", item->file_path, NULL);
+
+ nlesrc->seek_done = FALSE;
+ nlesrc->video_seek_done = FALSE;
+ nlesrc->audio_seek_done = FALSE;
+ nlesrc->audio_eos = TRUE;
+ nlesrc->video_eos = TRUE;
+ nlesrc->audio_ts = 0;
+ nlesrc->video_ts = 0;
+ nlesrc->start_ts = nlesrc->accu_time;
+ nlesrc->video_linked = FALSE;
+ nlesrc->audio_linked = FALSE;
+
+ GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
+ GST_TIME_ARGS (nlesrc->start_ts));
+ gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
+ ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 2 * GST_SECOND);
+ if (ret == GST_STATE_CHANGE_FAILURE) {
+ GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
+ gst_nle_source_check_eos (nlesrc);
+ return;
+ }
+
+ nlesrc->seek_done = TRUE;
+ if (!item->still_picture) {
+ GST_DEBUG_OBJECT (nlesrc, "Sending seek event");
+ gst_element_seek (nlesrc->decoder, 1, GST_FORMAT_TIME,
+ GST_SEEK_FLAG_ACCURATE,
+ GST_SEEK_TYPE_SET, item->start, GST_SEEK_TYPE_SET, item->stop);
+ }
+}
+
+
+static GstStateChangeReturn
+gst_nle_source_change_state (GstElement * element, GstStateChange transition)
+{
+ GstNleSource *nlesrc;
+ GstStateChangeReturn res;
+
+ nlesrc = GST_NLE_SOURCE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_nle_source_setup (nlesrc);
+ gst_nle_source_next (nlesrc);
+ break;
+ default:
+ break;
+ }
+
+ res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (res == GST_STATE_CHANGE_FAILURE)
+ return res;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (nlesrc->decoder) {
+ gst_element_set_state (nlesrc->decoder, GST_STATE_NULL);
+ gst_object_unref (nlesrc->decoder);
+ }
+ default:
+ break;
+ }
+
+ return res;
+}
+
+void
+gst_nle_source_add_item (GstNleSource * nlesrc, const gchar * file_path,
+ const gchar * title, guint64 start, guint64 stop, gfloat rate,
+ gboolean still_picture)
+{
+ GstNleSrcItem *item;
+ gchar *uri;
+
+ uri = lgm_filename_to_uri (file_path);
+ item = gst_nle_source_item_new (uri, title, start, stop, rate, still_picture);
+ g_free (uri);
+ nlesrc->queue = g_list_append (nlesrc->queue, item);
+
+ GST_INFO_OBJECT (nlesrc, "Added new item to the queue start:%"
+ GST_TIME_FORMAT " stop:%" GST_TIME_FORMAT "rate:%f",
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop), rate);
+}
+
+void
+gst_nle_source_configure (GstNleSource * nlesrc, guint width, guint height,
+ guint fps_n, guint fps_d, gboolean overlay_title, gboolean with_audio)
+{
+ nlesrc->width = width;
+ nlesrc->height = height;
+ nlesrc->fps_n = fps_n;
+ nlesrc->fps_d = fps_d;
+ nlesrc->overlay_title = overlay_title;
+ nlesrc->with_audio = with_audio;
+ nlesrc->title_size = 15;
+
+ GST_INFO_OBJECT (nlesrc, "Configuring source with %dx%d %d/%dfps t:%d a:%d",
+ width, height, fps_n, fps_d, overlay_title, with_audio);
+}
+
+GstNleSource *
+gst_nle_source_new (void)
+{
+ GstNleSource *nlesrc;
+
+ nlesrc = g_object_new (GST_TYPE_NLE_SOURCE, NULL);
+ return nlesrc;
+}
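
The heart of the new source is the retiming done in gst_nle_source_push_buffer() above: each decoded buffer is moved from the clip's local timeline onto the output timeline as new_ts = start_ts + (buf_ts - item->start) / item->rate, where start_ts is the output time accumulated by the previously rendered items. A self-contained sketch of that arithmetic, with illustrative numbers rather than values from a real run:

#include <gst/gst.h>

int
main (void)
{
  guint64 start_ts = 5 * GST_SECOND;    /* output time already rendered (accu_time) */
  guint64 item_start = 10 * GST_SECOND; /* item->start inside the source clip */
  gfloat rate = 2.0;                    /* item->rate: 2x playback */
  guint64 buf_ts = 12 * GST_SECOND;     /* timestamp of a decoded buffer */

  /* 2s into the item, played back at 2x, lands 1s after start_ts */
  guint64 new_ts = start_ts + (guint64) ((buf_ts - item_start) / rate);

  /* prints 0:00:06.000000000 */
  g_print ("%" GST_TIME_FORMAT "\n", GST_TIME_ARGS (new_ts));
  return 0;
}
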
diff --git a/libcesarplayer/gst-nle-source.h b/libcesarplayer/gst-nle-source.h
new file mode 100644
index 0000000..c133944
--- /dev/null
+++ b/libcesarplayer/gst-nle-source.h
@@ -0,0 +1,114 @@
+/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */
+/*
+ * Gstreamer NLE source
+ * Copyright (C) Andoni Morales Alastruey 2013 <ylatuya gmail com>
+ *
+ * You may redistribute it and/or modify it under the terms of the
+ * GNU General Public License, as published by the Free Software
+ * Foundation; either version 2 of the License, or (at your option)
+ * any later version.
+ *
+ * Gstreamer NLE source is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+ * See the GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program. If not, write to:
+ * The Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor
+ * Boston, MA 02110-1301, USA.
+ */
+
+#ifndef _GST_NLE_SOURCE_H_
+#define _GST_NLE_SOURCE_H_
+
+#ifdef WIN32
+#define EXPORT __declspec (dllexport)
+#else
+#define EXPORT
+#endif
+
+#include <glib-object.h>
+#include <gtk/gtk.h>
+#include "common.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_NLE_SOURCE (gst_nle_source_get_type ())
+#define GST_NLE_SOURCE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_NLE_SOURCE, GstNleSource))
+#define GST_NLE_SOURCE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_NLE_SOURCE, GstNleSourceClass))
+#define GST_IS_NLE_SOURCE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_NLE_SOURCE))
+#define GST_IS_NLE_SOURCE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_NLE_SOURCE))
+#define GST_NLE_SOURCE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_NLE_SOURCE, GstNleSourceClass))
+#define GCC_ERROR gst_nle_source_error_quark ()
+typedef struct _GstNleSourceClass GstNleSourceClass;
+typedef struct _GstNleSource GstNleSource;
+
+
+struct _GstNleSourceClass
+{
+ GstBinClass parent_class;
+};
+
+struct _GstNleSource
+{
+ GstBin parent;
+
+ guint width;
+ guint height;
+ guint fps_n;
+ guint fps_d;
+ guint title_size;
+ gboolean overlay_title;
+ gboolean with_audio;
+
+ GstPad *video_pad;
+ GstPad *audio_pad;
+ GstElement *video_appsrc;
+ GstElement *audio_appsrc;
+ GstElement *textoverlay;
+ gboolean video_linked;
+ gboolean audio_linked;
+ gboolean video_pad_added;
+ gboolean audio_pad_added;
+
+ GstElement *decoder;
+
+ guint64 accu_time;
+ guint64 start_ts;
+ guint64 video_ts;
+ guint64 audio_ts;
+ gboolean seek_done;
+ gboolean audio_seek_done;
+ gboolean video_seek_done;
+ gboolean audio_eos;
+ gboolean video_eos;
+
+ GMutex stream_lock;
+
+ GList *queue;
+ gint index;
+};
+
+EXPORT GType gst_nle_source_get_type (void) G_GNUC_CONST;
+
+EXPORT GstNleSource *gst_nle_source_new (void);
+
+EXPORT void gst_nle_source_configure (GstNleSource *nlesrc,
+ guint width, guint height,
+ guint fps_n, guint fps_d,
+ gboolean overlay_title,
+ gboolean with_audio
+ );
+
+EXPORT void gst_nle_source_add_item (GstNleSource *nlesrc,
+ const gchar *file_path,
+ const gchar *title,
+ guint64 start,
+ guint64 stop,
+ gfloat rate,
+ gboolean still_picture
+ );
+G_END_DECLS
+#endif /* _GST_NLE_SOURCE_H_ */
+
diff --git a/libcesarplayer/gst-video-editor.c b/libcesarplayer/gst-video-editor.c
index 79b27c3..e312dd0 100644
--- a/libcesarplayer/gst-video-editor.c
+++ b/libcesarplayer/gst-video-editor.c
@@ -1,4 +1,4 @@
- /*GStreamer Video Editor Based On GNonlin
+ /* GStreamer Non Linear Video Editor
* Copyright (C) 2007-2009 Andoni Morales Alastruey <ylatuya gmail com>
*
* This program is free software.
@@ -24,6 +24,7 @@
#include <stdio.h>
#include <gst/gst.h>
#include "gst-video-editor.h"
+#include "gst-nle-source.h"
#include "video-utils.h"
@@ -49,13 +50,8 @@ enum
struct GstVideoEditorPrivate
{
- gint segments;
- gint active_segment;
- gint64 *stop_times;
- GList *titles;
- GList *gnl_video_filesources;
- GList *gnl_audio_filesources;
gint64 duration;
+ gint64 last_pos;
/* Properties */
gboolean audio_enabled;
@@ -78,15 +74,11 @@ struct GstVideoEditorPrivate
GstElement *aencode_bin;
/* Source */
- GstElement *gnl_video_composition;
- GstElement *gnl_audio_composition;
+ GstNleSource *nle_source;
/* Video */
GstElement *identity;
GstElement *ffmpegcolorspace;
- GstElement *videorate;
- GstElement *textoverlay;
- GstElement *videoscale;
GstElement *capsfilter;
GstElement *queue;
GstElement *video_encoder;
@@ -117,7 +109,7 @@ static void new_decoded_pad_cb (GstElement * object, GstPad * arg0,
static void gve_bus_message_cb (GstBus * bus, GstMessage * message,
gpointer data);
static gboolean gve_query_timeout (GstVideoEditor * gve);
-static void gve_apply_title_size (GstVideoEditor * gve, gint size);
+
G_DEFINE_TYPE (GstVideoEditor, gst_video_editor, G_TYPE_OBJECT);
@@ -145,14 +137,7 @@ gst_video_editor_init (GstVideoEditor * object)
priv->title_size = 20;
priv->title_enabled = TRUE;
priv->audio_enabled = TRUE;
-
- priv->duration = 0;
- priv->segments = 0;
- priv->gnl_video_filesources = NULL;
- priv->gnl_audio_filesources = NULL;
- priv->titles = NULL;
- priv->stop_times = (gint64 *) malloc (200 * sizeof (gint64));
-
+ priv->nle_source = NULL;
priv->update_id = 0;
}
@@ -180,11 +165,6 @@ gst_video_editor_finalize (GObject * object)
}
g_free (gve->priv->output_file);
- g_list_free (gve->priv->gnl_video_filesources);
- g_list_free (gve->priv->gnl_audio_filesources);
- g_free (gve->priv->stop_times);
- g_list_free (gve->priv->titles);
-
G_OBJECT_CLASS (gst_video_editor_parent_class)->finalize (object);
}
@@ -234,34 +214,6 @@ gve_set_tick_timeout (GstVideoEditor * gve, guint msecs)
g_timeout_add (msecs, (GSourceFunc) gve_query_timeout, gve);
}
-static void
-gve_apply_title_size (GstVideoEditor * gve, gint size)
-{
- gchar *font;
-
- font = g_strdup_printf ("sans bold %d", size);
- g_object_set (G_OBJECT (gve->priv->textoverlay), "font-desc", font, NULL);
- g_free (font);
-}
-
-static void
-gve_set_overlay_title (GstVideoEditor *gve, gchar *title)
-{
- glong length;
-
- if (title == NULL)
- return;
-
- g_object_set (G_OBJECT (gve->priv->textoverlay), "text", title, NULL);
-
- length = g_utf8_strlen (title, -1);
- if (length * gve->priv->title_size > gve->priv->width) {
- gve_apply_title_size (gve, gve->priv->width / length - 1);
- } else {
- gve_apply_title_size (gve, gve->priv->title_size);
- }
-}
-
GQuark
gst_video_editor_error_quark (void)
{
@@ -279,58 +231,33 @@ gve_create_video_encode_bin (GstVideoEditor * gve)
GstPad *sinkpad = NULL;
GstPad *srcpad = NULL;
GError *error = NULL;
- GstCaps *caps;
gve->priv->vencode_bin = gst_element_factory_make ("bin", "vencodebin");
gve->priv->identity = gst_element_factory_make ("identity", "identity");
gve->priv->ffmpegcolorspace =
gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace");
- gve->priv->videorate = gst_element_factory_make ("videorate", "videorate");
- gve->priv->videoscale = gst_element_factory_make ("videoscale", "videoscale");
- gve->priv->capsfilter = gst_element_factory_make ("capsfilter", "capsfilter");
- gve->priv->textoverlay =
- gst_element_factory_make ("textoverlay", "textoverlay");
- gve->priv->queue = gst_element_factory_make ("queue", "video-encode-queue");
- gve->priv->video_encoder = lgm_create_video_encoder (
- gve->priv->video_encoder_type, gve->priv->video_quality,
- GVE_ERROR, &error);
+ gve->priv->queue = gst_element_factory_make ("queue2", "video-encode-queue");
+ gve->priv->video_encoder =
+ lgm_create_video_encoder (gve->priv->video_encoder_type,
+ gve->priv->video_quality, GVE_ERROR, &error);
if (error) {
g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, error->message);
g_error_free (error);
return;
}
- caps = gst_caps_new_simple ("video/x-raw-yuv",
- "width", G_TYPE_INT, (gint) gve->priv->width,
- "height", G_TYPE_INT, (gint) gve->priv->height,
- "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
- "framerate", GST_TYPE_FRACTION,
- (gint) gve->priv->fps_n, (gint) gve->priv->fps_d, NULL);
- g_object_set (G_OBJECT (gve->priv->capsfilter), "caps", caps, NULL);
- gst_caps_unref (caps);
g_object_set (G_OBJECT (gve->priv->identity), "single-segment", TRUE, NULL);
- g_object_set (G_OBJECT (gve->priv->textoverlay),
- "valignment", 2, "halignment", 2, "auto-resize", TRUE, "wrap-mode", 0, NULL);
- gve_apply_title_size (gve, gve->priv->title_size);
- if (!gve->priv->title_enabled) {
- g_object_set (G_OBJECT (gve->priv->textoverlay), "silent", TRUE, NULL);
- }
- g_object_set (G_OBJECT (gve->priv->videoscale), "add-borders", TRUE, NULL);
+ g_object_set (G_OBJECT (gve->priv->queue), "max-size-bytes", 4 * 1000 * 1000,
+ "max-size-buffers", 0, "max-size-time", 0, NULL);
/*Add and link elements */
gst_bin_add_many (GST_BIN (gve->priv->vencode_bin),
gve->priv->identity,
gve->priv->ffmpegcolorspace,
- gve->priv->videorate,
- gve->priv->videoscale,
- gve->priv->capsfilter,
- gve->priv->textoverlay, gve->priv->queue, gve->priv->video_encoder, NULL);
+ gve->priv->video_encoder, gve->priv->queue, NULL);
gst_element_link_many (gve->priv->identity,
gve->priv->ffmpegcolorspace,
- gve->priv->videoscale,
- gve->priv->videorate,
- gve->priv->capsfilter,
- gve->priv->textoverlay, gve->priv->queue, gve->priv->video_encoder, NULL);
+ gve->priv->video_encoder, gve->priv->queue, NULL);
/*Create bin sink pad */
sinkpad = gst_element_get_static_pad (gve->priv->identity, "sink");
@@ -339,7 +266,7 @@ gve_create_video_encode_bin (GstVideoEditor * gve)
gst_ghost_pad_new ("sink", sinkpad));
/*Creat bin src pad */
- srcpad = gst_element_get_static_pad (gve->priv->video_encoder, "src");
+ srcpad = gst_element_get_static_pad (gve->priv->queue, "src");
gst_pad_set_active (srcpad, TRUE);
gst_element_add_pad (GST_ELEMENT (gve->priv->vencode_bin),
gst_ghost_pad_new ("src", srcpad));
@@ -354,7 +281,6 @@ gve_create_audio_encode_bin (GstVideoEditor * gve)
GstPad *sinkpad = NULL;
GstPad *srcpad = NULL;
GError *error = NULL;
- GstCaps *caps;
if (gve->priv->aencode_bin != NULL)
return;
@@ -362,16 +288,10 @@ gve_create_audio_encode_bin (GstVideoEditor * gve)
gve->priv->aencode_bin = gst_element_factory_make ("bin", "aencodebin");
gve->priv->audioidentity =
gst_element_factory_make ("identity", "audio-identity");
- gve->priv->audioconvert =
- gst_element_factory_make ("audioconvert", "audioconvert");
- gve->priv->audioresample =
- gst_element_factory_make ("audioresample", "audioresample");
- gve->priv->audiocapsfilter =
- gst_element_factory_make ("capsfilter", "audiocapsfilter");
- gve->priv->audioqueue = gst_element_factory_make ("queue", "audio-queue");
- gve->priv->audioencoder = lgm_create_audio_encoder (
- gve->priv->audio_encoder_type, gve->priv->audio_quality, GVE_ERROR,
- &error);
+ gve->priv->audioqueue = gst_element_factory_make ("queue2", "audio-queue");
+ gve->priv->audioencoder =
+ lgm_create_audio_encoder (gve->priv->audio_encoder_type,
+ gve->priv->audio_quality, GVE_ERROR, &error);
if (error) {
g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, error->message);
g_error_free (error);
@@ -380,23 +300,16 @@ gve_create_audio_encode_bin (GstVideoEditor * gve)
g_object_set (G_OBJECT (gve->priv->audioidentity), "single-segment", TRUE,
NULL);
- caps = gst_caps_from_string (AUDIO_INT_CAPS ";" AUDIO_FLOAT);
- g_object_set (G_OBJECT (gve->priv->audiocapsfilter), "caps", caps, NULL);
- gst_caps_unref (caps);
+ g_object_set (G_OBJECT (gve->priv->audioqueue), "max-size-bytes",
+ 4 * 1000 * 1000, "max-size-buffers", 0, "max-size-time", 0, NULL);
/*Add and link elements */
gst_bin_add_many (GST_BIN (gve->priv->aencode_bin),
gve->priv->audioidentity,
- gve->priv->audioconvert,
- gve->priv->audioresample,
- gve->priv->audiocapsfilter,
- gve->priv->audioqueue, gve->priv->audioencoder, NULL);
+ gve->priv->audioencoder, gve->priv->audioqueue, NULL);
gst_element_link_many (gve->priv->audioidentity,
- gve->priv->audioconvert,
- gve->priv->audioresample,
- gve->priv->audiocapsfilter,
- gve->priv->audioqueue, gve->priv->audioencoder, NULL);
+ gve->priv->audioencoder, gve->priv->audioqueue, NULL);
/*Create bin sink pad */
sinkpad = gst_element_get_static_pad (gve->priv->audioidentity, "sink");
@@ -405,7 +318,7 @@ gve_create_audio_encode_bin (GstVideoEditor * gve)
gst_ghost_pad_new ("sink", sinkpad));
/*Creat bin src pad */
- srcpad = gst_element_get_static_pad (gve->priv->audioencoder, "src");
+ srcpad = gst_element_get_static_pad (gve->priv->audioqueue, "src");
gst_pad_set_active (srcpad, TRUE);
gst_element_add_pad (GST_ELEMENT (gve->priv->aencode_bin),
gst_ghost_pad_new ("src", srcpad));
@@ -433,12 +346,11 @@ new_decoded_pad_cb (GstElement * object, GstPad * pad, gpointer user_data)
gve = GST_VIDEO_EDITOR (user_data);
/* check media type */
- caps = gst_pad_get_caps (pad);
+ caps = GST_PAD_CAPS (pad);
str = gst_caps_get_structure (caps, 0);
if (g_strrstr (gst_structure_get_name (str), "video")) {
- videopad =
- gst_element_get_compatible_pad (gve->priv->vencode_bin, pad, NULL);
+ videopad = gst_element_get_static_pad (gve->priv->vencode_bin, "sink");
/* only link once */
if (GST_PAD_IS_LINKED (videopad)) {
g_object_unref (videopad);
@@ -453,8 +365,7 @@ new_decoded_pad_cb (GstElement * object, GstPad * pad, gpointer user_data)
else if (g_strrstr (gst_structure_get_name (str), "audio")
&& gve->priv->audio_enabled) {
- audiopad =
- gst_element_get_compatible_pad (gve->priv->aencode_bin, pad, NULL);
+ audiopad = gst_element_get_static_pad (gve->priv->aencode_bin, "sink");
/* only link once */
if (GST_PAD_IS_LINKED (audiopad)) {
g_object_unref (audiopad);
@@ -535,7 +446,6 @@ gve_bus_message_cb (GstBus * bus, GstMessage * message, gpointer data)
}
gst_element_set_state (gve->priv->main_pipeline, GST_STATE_NULL);
g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) 1);
- gve->priv->active_segment = 0;
/* Close file sink properly */
g_object_set (G_OBJECT (gve->priv->file_sink), "location", "", NULL);
break;
@@ -552,51 +462,34 @@ gve_error_msg (GstVideoEditor * gve, GstMessage * msg)
gchar *dbg = NULL;
gst_message_parse_error (msg, &err, &dbg);
- if (err) {
- GST_ERROR ("message = %s", GST_STR_NULL (err->message));
- GST_ERROR ("domain = %d (%s)", err->domain,
- GST_STR_NULL (g_quark_to_string (err->domain)));
- GST_ERROR ("code = %d", err->code);
- GST_ERROR ("debug = %s", GST_STR_NULL (dbg));
- GST_ERROR ("source = %" GST_PTR_FORMAT, msg->src);
-
- g_message ("Error: %s\n%s\n", GST_STR_NULL (err->message),
- GST_STR_NULL (dbg));
- g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, err->message);
- g_error_free (err);
- }
+ GST_ERROR ("message = %s", GST_STR_NULL (err->message));
+ GST_ERROR ("domain = %d (%s)", err->domain,
+ GST_STR_NULL (g_quark_to_string (err->domain)));
+ GST_ERROR ("code = %d", err->code);
+ GST_ERROR ("debug = %s", GST_STR_NULL (dbg));
+ GST_ERROR ("source = %" GST_PTR_FORMAT, msg->src);
+
+ g_message ("Error: %s\n%s\n", GST_STR_NULL (err->message),
+ GST_STR_NULL (dbg));
+ g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, err->message);
+ g_error_free (err);
g_free (dbg);
}
static gboolean
gve_query_timeout (GstVideoEditor * gve)
{
- GstFormat fmt = GST_FORMAT_TIME;
- gint64 pos = -1;
- gchar *title;
- gint64 stop_time = gve->priv->stop_times[gve->priv->active_segment];
-
- if (gst_element_query_position (gve->priv->main_pipeline, &fmt, &pos)) {
- if (pos != -1 && fmt == GST_FORMAT_TIME) {
- g_signal_emit (gve,
- gve_signals[SIGNAL_PERCENT_COMPLETED],
- 0, (float) pos / (float) gve->priv->duration);
- }
- } else {
- GST_INFO ("could not get position");
- }
-
- if (gst_element_query_position (gve->priv->video_encoder, &fmt, &pos)) {
- if (stop_time - pos <= 0) {
+ g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED],
+ 0, (float) gve->priv->last_pos / (float) gve->priv->duration);
+ return TRUE;
+}
- gve->priv->active_segment++;
- title =
- (gchar *) g_list_nth_data (gve->priv->titles,
- gve->priv->active_segment);
- gve_set_overlay_title (gve, title);
- }
+static gboolean
+gve_on_buffer_cb (GstPad * pad, GstBuffer * buf, GstVideoEditor * gve)
+{
+ if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
+ gve->priv->last_pos = GST_BUFFER_TIMESTAMP (buf);
}
-
return TRUE;
}
@@ -613,11 +506,6 @@ gst_video_editor_add_segment (GstVideoEditor * gve, gchar * file,
gchar * title, gboolean hasAudio)
{
GstState cur_state;
- GstElement *gnl_filesource = NULL;
- GstElement *audiotestsrc = NULL;
- GstCaps *filter = NULL;
- gchar *element_name = "";
- gint64 final_duration;
g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
@@ -627,77 +515,24 @@ gst_video_editor_add_segment (GstVideoEditor * gve, gchar * file,
return;
}
- start = GST_MSECOND * start;
- duration = GST_MSECOND * duration;
- final_duration = duration / rate;
-
- /* Video */
- filter = gst_caps_from_string ("video/x-raw-rgb;video/x-raw-yuv");
- element_name = g_strdup_printf ("gnlvideofilesource%d", gve->priv->segments);
- gnl_filesource = gst_element_factory_make ("gnlfilesource", element_name);
- g_object_set (G_OBJECT (gnl_filesource), "location", file,
- "media-start", start,
- "media-duration", duration,
- "start", gve->priv->duration,
- "duration", final_duration, "caps", filter, NULL);
- gst_bin_add (GST_BIN (gve->priv->gnl_video_composition), gnl_filesource);
- gve->priv->gnl_video_filesources =
- g_list_append (gve->priv->gnl_video_filesources, gnl_filesource);
+ duration = duration * GST_MSECOND;
+ start = start * GST_MSECOND;
- /* Audio */
- if (gve->priv->audio_enabled) {
- if (hasAudio && rate == 1) {
- element_name =
- g_strdup_printf ("gnlaudiofilesource%d", gve->priv->segments);
- gnl_filesource = gst_element_factory_make ("gnlfilesource", element_name);
- g_object_set (G_OBJECT (gnl_filesource), "location", file, NULL);
- } else {
- /* If the file doesn't contain audio, something must be playing */
- /* We use an audiotestsrc mutted and with a low priority */
- element_name =
- g_strdup_printf ("gnlaudiofakesource%d", gve->priv->segments);
- gnl_filesource = gst_element_factory_make ("gnlsource", element_name);
- element_name = g_strdup_printf ("audiotestsource%d", gve->priv->segments);
- audiotestsrc = gst_element_factory_make ("audiotestsrc", element_name);
- g_object_set (G_OBJECT (audiotestsrc), "volume", (double) 0, NULL);
- gst_bin_add (GST_BIN (gnl_filesource), audiotestsrc);
- }
- filter = gst_caps_from_string ("audio/x-raw-float;audio/x-raw-int");
- g_object_set (G_OBJECT (gnl_filesource),
- "media-start", start,
- "media-duration", duration,
- "start", gve->priv->duration,
- "duration", final_duration, "caps", filter, NULL);
- gst_bin_add (GST_BIN (gve->priv->gnl_audio_composition), gnl_filesource);
- gve->priv->gnl_audio_filesources =
- g_list_append (gve->priv->gnl_audio_filesources, gnl_filesource);
- }
+ gst_nle_source_add_item (gve->priv->nle_source, file, title, start,
+ start + duration, rate, FALSE);
GST_INFO ("New segment: start={%" GST_TIME_FORMAT "} duration={%"
GST_TIME_FORMAT "} ", GST_TIME_ARGS (gve->priv->duration),
- GST_TIME_ARGS (final_duration));
-
- gve->priv->duration += final_duration;
- gve->priv->segments++;
-
- gve->priv->titles = g_list_append (gve->priv->titles, title);
- gve->priv->stop_times[gve->priv->segments - 1] = gve->priv->duration;
+ GST_TIME_ARGS (duration));
- g_free (element_name);
+ gve->priv->duration += duration;
}
-
void
gst_video_editor_add_image_segment (GstVideoEditor * gve, gchar * file,
guint64 start, gint64 duration, gchar * title)
{
GstState cur_state;
- GstElement *gnl_filesource = NULL;
- GstElement *imagesourcebin = NULL;
- GstElement *audiotestsrc = NULL;
- GstCaps *filter = NULL;
- gchar *element_name = NULL;
- gchar *desc = NULL;
g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
@@ -710,112 +545,39 @@ gst_video_editor_add_image_segment (GstVideoEditor * gve, gchar * file,
duration = duration * GST_MSECOND;
start = start * GST_MSECOND;
- /* Video */
- /* gnlsource */
- filter = gst_caps_from_string ("video/x-raw-rgb;video/x-raw-yuv");
- element_name = g_strdup_printf ("gnlvideofilesource%d", gve->priv->segments);
- gnl_filesource = gst_element_factory_make ("gnlsource", element_name);
- g_object_set (G_OBJECT (gnl_filesource),
- "media-start", start,
- "media-duration", duration,
- "start", gve->priv->duration,
- "duration", duration, "caps", filter, NULL);
- g_free(element_name);
- /* filesrc ! pngdec ! ffmpegcolorspace ! imagefreeze */
- desc = g_strdup_printf("filesrc location=%s ! pngdec ! videoscale ! ffmpegcolorspace ! video/x-raw-rgb, pixel-aspect-ratio=1/1 ! imagefreeze ", file);
- imagesourcebin = gst_parse_bin_from_description(desc, TRUE, NULL);
- g_free(desc);
- gst_bin_add (GST_BIN (gnl_filesource), imagesourcebin);
- gst_bin_add (GST_BIN (gve->priv->gnl_video_composition), gnl_filesource);
- gve->priv->gnl_video_filesources =
- g_list_append (gve->priv->gnl_video_filesources, gnl_filesource);
-
- /* Audio */
- if (gve->priv->audio_enabled) {
- element_name =
- g_strdup_printf ("gnlaudiofakesource%d", gve->priv->segments);
- gnl_filesource = gst_element_factory_make ("gnlsource", element_name);
- g_free (element_name);
- element_name = g_strdup_printf ("audiotestsource%d", gve->priv->segments);
- audiotestsrc = gst_element_factory_make ("audiotestsrc", element_name);
- g_free (element_name);
- g_object_set (G_OBJECT (audiotestsrc), "volume", (double) 0, NULL);
- gst_bin_add (GST_BIN (gnl_filesource), audiotestsrc);
- filter = gst_caps_from_string ("audio/x-raw-float;audio/x-raw-int");
- g_object_set (G_OBJECT (gnl_filesource),
- "media-start", start,
- "media-duration", duration,
- "start", gve->priv->duration,
- "duration", duration, "caps", filter, NULL);
- gst_bin_add (GST_BIN (gve->priv->gnl_audio_composition), gnl_filesource);
- gve->priv->gnl_audio_filesources =
- g_list_append (gve->priv->gnl_audio_filesources, gnl_filesource);
- }
+ gst_nle_source_add_item (gve->priv->nle_source, file, title, start,
+ start + duration, 1, TRUE);
GST_INFO ("New segment: start={%" GST_TIME_FORMAT "} duration={%"
GST_TIME_FORMAT "} ", GST_TIME_ARGS (gve->priv->duration),
GST_TIME_ARGS (duration));
gve->priv->duration += duration;
- gve->priv->segments++;
-
- gve->priv->titles = g_list_append (gve->priv->titles, title);
- gve->priv->stop_times[gve->priv->segments - 1] = gve->priv->duration;
-
}
void
gst_video_editor_clear_segments_list (GstVideoEditor * gve)
{
- GList *tmp = NULL;
-
g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
GST_INFO_OBJECT (gve, "Clearing list of segments");
gst_video_editor_cancel (gve);
+ gst_element_set_state (gve->priv->main_pipeline, GST_STATE_NULL);
- tmp = gve->priv->gnl_video_filesources;
-
- for (; tmp; tmp = g_list_next (tmp)) {
- GstElement *object = (GstElement *) tmp->data;
- if (object)
- gst_element_set_state (object, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (gve->priv->gnl_video_composition), object);
- }
-
- tmp = gve->priv->gnl_audio_filesources;
-
- for (; tmp; tmp = g_list_next (tmp)) {
- GstElement *object = (GstElement *) tmp->data;
- if (object)
- gst_element_set_state (object, GST_STATE_NULL);
- gst_bin_remove (GST_BIN (gve->priv->gnl_audio_composition), object);
- }
-
- g_list_free (tmp);
- g_list_free (gve->priv->gnl_video_filesources);
- g_list_free (gve->priv->gnl_audio_filesources);
- g_free (gve->priv->stop_times);
- g_list_free (gve->priv->titles);
-
- gve->priv->gnl_video_filesources = NULL;
- gve->priv->gnl_audio_filesources = NULL;
- gve->priv->stop_times = (gint64 *) malloc (200 * sizeof (gint64));
- gve->priv->titles = NULL;
-
+ gve->priv->nle_source = NULL;
gve->priv->duration = 0;
- gve->priv->active_segment = 0;
}
void
gst_video_editor_start (GstVideoEditor * gve)
{
GError *error = NULL;
+ GstPad *pad;
g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
- GST_INFO_OBJECT(gve, "Starting");
+ GST_INFO_OBJECT (gve, "Starting. output file: %s", gve->priv->output_file);
/* Create elements */
gve->priv->muxer = lgm_create_muxer (gve->priv->muxer_type, GVE_ERROR,
@@ -834,24 +596,23 @@ gst_video_editor_start (GstVideoEditor * gve)
/* Link elements */
gst_bin_add_many (GST_BIN (gve->priv->main_pipeline),
- gve->priv->gnl_video_composition,
- gve->priv->vencode_bin,
- gve->priv->muxer, gve->priv->file_sink, NULL);
+ GST_ELEMENT (gve->priv->nle_source),
+ gve->priv->vencode_bin, gve->priv->muxer, gve->priv->file_sink, NULL);
gst_element_link_many (gve->priv->vencode_bin,
gve->priv->muxer, gve->priv->file_sink, NULL);
if (gve->priv->audio_enabled) {
- gst_bin_add (GST_BIN (gve->priv->main_pipeline),
- gve->priv->gnl_audio_composition);
gve_create_audio_encode_bin (gve);
- gst_bin_add (GST_BIN (gve->priv->main_pipeline),
- gve->priv->aencode_bin);
+ gst_bin_add (GST_BIN (gve->priv->main_pipeline), gve->priv->aencode_bin);
gst_element_link (gve->priv->aencode_bin, gve->priv->muxer);
- g_print ("ADDing audio encode bin and linking to muxer\n");
}
- gve_set_overlay_title (gve, (gchar *) g_list_nth_data (gve->priv->titles, 0));
+ gve->priv->last_pos = 0;
+ pad = gst_element_get_static_pad (gve->priv->file_sink, "sink");
+ gst_pad_add_buffer_probe (pad, (GCallback) gve_on_buffer_cb, gve);
+ gst_object_unref (pad);
+
gst_element_set_state (gve->priv->main_pipeline, GST_STATE_PLAYING);
g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) 0);
}
@@ -861,7 +622,7 @@ gst_video_editor_cancel (GstVideoEditor * gve)
{
g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
- GST_INFO_OBJECT(gve, "Cancelling");
+ GST_INFO_OBJECT (gve, "Cancelling");
if (gve->priv->update_id > 0) {
g_source_remove (gve->priv->update_id);
gve->priv->update_id = 0;
@@ -878,7 +639,7 @@ gst_video_editor_init_backend (int *argc, char ***argv)
void
gst_video_editor_set_encoding_format (GstVideoEditor * gve,
- gchar *output_file,
+ gchar * output_file,
VideoEncoderType video_codec, AudioEncoderType audio_codec,
VideoMuxerType muxer, guint video_quality, guint audio_quality,
guint width, guint height, guint fps_n, guint fps_d,
@@ -896,6 +657,9 @@ gst_video_editor_set_encoding_format (GstVideoEditor * gve,
gve->priv->fps_d = fps_d;
gve->priv->audio_enabled = enable_audio;
gve->priv->title_enabled = enable_title;
+
+ gst_nle_source_configure (gve->priv->nle_source, width, height, fps_n, fps_d,
+ enable_title, enable_audio);
}
GstVideoEditor *
@@ -917,28 +681,14 @@ gst_video_editor_new (GError ** err)
}
/* Create elements */
- gve->priv->gnl_video_composition =
- gst_element_factory_make ("gnlcomposition", "gnl-video-composition");
- gve->priv->gnl_audio_composition =
- gst_element_factory_make ("gnlcomposition", "gnl-audio-composition");
- if (!gve->priv->gnl_video_composition || !gve->priv->gnl_audio_composition) {
- g_set_error (err, GVE_ERROR, GST_ERROR_PLUGIN_LOAD,
- ("Failed to create a Gnonlin element. "
- "Please check your GStreamer installation."));
- g_object_ref_sink (gve);
- g_object_unref (gve);
- return NULL;
- }
+ gve->priv->nle_source = gst_nle_source_new ();
- /*Connect bus signals */
- /*Wait for a "new-decoded-pad" message to link the composition with
- the encoder tail */
+ /* Listen for a "pad-added" to link the composition with the encoder tail */
gve->priv->bus = gst_element_get_bus (GST_ELEMENT (gve->priv->main_pipeline));
- g_signal_connect (gve->priv->gnl_video_composition, "pad-added",
- G_CALLBACK (new_decoded_pad_cb), gve);
- g_signal_connect (gve->priv->gnl_audio_composition, "pad-added",
+ g_signal_connect (gve->priv->nle_source, "pad-added",
G_CALLBACK (new_decoded_pad_cb), gve);
+ /*Connect bus signals */
gst_bus_add_signal_watch (gve->priv->bus);
gve->priv->sig_bus_async = g_signal_connect (gve->priv->bus, "message",
G_CALLBACK (gve_bus_message_cb), gve);
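
With the gnonlin compositions gone, progress reporting in gst-video-editor.c no longer tracks per-segment stop times: a buffer probe on the file sink's pad records the last timestamp that reached the output, and the periodic tick emits last_pos / duration as the completion fraction. A stand-alone sketch of that pattern under GStreamer 0.10, with illustrative names and numbers (not the editor's own):

#include <gst/gst.h>

static gint64 last_pos = 0;
static gint64 total_duration = 20 * GST_SECOND; /* sum of all segment durations */

/* buffer-probe callback, as installed with gst_pad_add_buffer_probe() */
static gboolean
on_sink_buffer (GstPad * pad, GstBuffer * buf, gpointer user_data)
{
  if (GST_BUFFER_TIMESTAMP_IS_VALID (buf))
    last_pos = GST_BUFFER_TIMESTAMP (buf);
  return TRUE;                  /* keep the buffer flowing */
}

/* periodic tick, e.g. scheduled with g_timeout_add() */
static gboolean
on_tick (gpointer user_data)
{
  g_print ("progress: %.1f%%\n",
      100.0 * (gdouble) last_pos / (gdouble) total_duration);
  return TRUE;
}

int
main (int argc, char *argv[])
{
  GstBuffer *buf;

  gst_init (&argc, &argv);
  buf = gst_buffer_new ();
  GST_BUFFER_TIMESTAMP (buf) = 5 * GST_SECOND; /* pretend this reached the sink */
  on_sink_buffer (NULL, buf, NULL);
  on_tick (NULL);                              /* prints "progress: 25.0%" */
  gst_buffer_unref (buf);
  return 0;
}
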
diff --git a/libcesarplayer/video-utils.c b/libcesarplayer/video-utils.c
index dee64b3..d2e7664 100644
--- a/libcesarplayer/video-utils.c
+++ b/libcesarplayer/video-utils.c
@@ -503,21 +503,20 @@ GstElement * lgm_create_audio_encoder (AudioEncoderType type, guint quality,
switch (type) {
case AUDIO_ENCODER_MP3:
encoder = gst_element_factory_make ("lamemp3enc", "audio-encoder");
- g_object_set (encoder, "target", 0,
- "quality", (gfloat) quality * 10 / 100, NULL);
+ g_object_set (encoder, "target", 0, "quality", (gfloat)4, NULL);
name = "Mp3 audio encoder";
break;
case AUDIO_ENCODER_AAC:
encoder = gst_element_factory_make ("faac", "audio-encoder");
- g_object_set (encoder, "bitrate", quality * 320000 / 100, NULL);
+ g_object_set (encoder, "bitrate", 128000, NULL);
name = "AAC audio encoder";
break;
case AUDIO_ENCODER_VORBIS:
default:
encoder = gst_element_factory_make ("vorbisenc", "audio-encoder");
- g_object_set (encoder, "quality", (gfloat) quality / 100, NULL);
+ g_object_set (encoder, "quality", 0.3, NULL);
name = "Vorbis audio encoder";
break;
}