[longomatch] Add support for exporting drawings to the video file
- From: Andoni Morales Alastruey <amorales@src.gnome.org>
- To: commits-list@gnome.org
- Cc:
- Subject: [longomatch] Add support for exporting drawings to the video file
- Date: Sat, 11 Aug 2012 15:05:21 +0000 (UTC)
commit 91a7cbb3fcd995d74904c19b960ec9cef3b1f3e2
Author: Andoni Morales Alastruey <ylatuya@gmail.com>
Date: Sat Aug 11 15:27:29 2012 +0200
Add support for exporting drawings to the video file
LongoMatch.Core/Common/Image.cs | 10 ++
.../Interfaces/Multimedia/IVideoEditor.cs | 2 +
LongoMatch.Multimedia/Editor/GstVideoSplitter.cs | 6 +
.../Services/RenderingJobsManager.cs | 59 ++++++++++--
libcesarplayer/gst-video-editor.c | 99 ++++++++++++++++++--
libcesarplayer/gst-video-editor.h | 27 +++---
6 files changed, 175 insertions(+), 28 deletions(-)
---
diff --git a/LongoMatch.Core/Common/Image.cs b/LongoMatch.Core/Common/Image.cs
index a0882c0..940cfca 100644
--- a/LongoMatch.Core/Common/Image.cs
+++ b/LongoMatch.Core/Common/Image.cs
@@ -88,6 +88,16 @@ namespace LongoMatch.Common
}
}
+ public static Image Composite(Image image1, Image image2) {
+ SImage dest = new SImage(image1.Value.Colorspace, true, image1.Value.BitsPerSample,
+ image1.Width, image1.Height);
+ image1.Value.Composite(dest, 0, 0, image2.Width, image2.Height, 0, 0, 1, 1,
+ Gdk.InterpType.Bilinear, 255);
+ image2.Value.Composite(dest, 0, 0, image2.Width, image2.Height, 0, 0, 1, 1,
+ Gdk.InterpType.Bilinear, 255);
+ return new Image(dest);
+ }
+
#else
public byte[] Serialize () {
if (image == null)
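For reference, a minimal sketch of how the new Composite helper can be used to burn a drawing into a captured frame and save it to disk; the helper name and the temporary path are illustrative only and not part of this commit:

    using LongoMatch.Common;

    // Hypothetical helper: composite a drawing over a video frame and save
    // the result to a temporary file, ready to be fed to AddImageSegment().
    static string SaveAnnotatedFrame (Image frame, Image overlay) {
        Image composed = Image.Composite (frame, overlay);
        string path = System.IO.Path.GetTempFileName ();
        composed.Save (path);
        return path;
    }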
diff --git a/LongoMatch.Core/Interfaces/Multimedia/IVideoEditor.cs b/LongoMatch.Core/Interfaces/Multimedia/IVideoEditor.cs
index f57f507..0b20283 100644
--- a/LongoMatch.Core/Interfaces/Multimedia/IVideoEditor.cs
+++ b/LongoMatch.Core/Interfaces/Multimedia/IVideoEditor.cs
@@ -47,6 +47,8 @@ namespace LongoMatch.Interfaces.Multimedia
}
void AddSegment(string filePath, long start, long duration, double rate, string title, bool hasAudio) ;
+
+ void AddImageSegment(string filePath, long start, long duration, string title) ;
void ClearList();
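The intended call pattern for the new method, sketched against the interface only: a play with one drawing becomes clip-up-to-the-drawing, a frozen annotated frame for the pause, then the rest of the clip. The helper name, file paths and millisecond timings below are invented for illustration and are not part of this commit.

    using LongoMatch.Interfaces.Multimedia;

    // Hypothetical usage sketch, assuming an already configured IVideoEditor.
    static void AddAnnotatedPlay (IVideoEditor editor) {
        // 10 000 ms -> 14 000 ms of the original clip
        editor.AddSegment ("/path/clip.mp4", 10000, 4000, 1.0, "My play", true);
        // 5 000 ms of the still image with the drawing burnt in
        editor.AddImageSegment ("/tmp/still.png", 14000, 5000, "My play");
        // 14 000 ms -> 20 000 ms of the original clip
        editor.AddSegment ("/path/clip.mp4", 14000, 6000, 1.0, "My play", true);
    }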
diff --git a/LongoMatch.Multimedia/Editor/GstVideoSplitter.cs b/LongoMatch.Multimedia/Editor/GstVideoSplitter.cs
index 9638c69..af9ea5e 100644
--- a/LongoMatch.Multimedia/Editor/GstVideoSplitter.cs
+++ b/LongoMatch.Multimedia/Editor/GstVideoSplitter.cs
@@ -295,6 +295,12 @@ namespace LongoMatch.Video.Editor {
gst_video_editor_add_segment(Handle, filePath, start, duration, rate, GLib.Marshaller.StringToPtrGStrdup(title), true);
}
+ [DllImport("libcesarplayer.dll")]
+ static extern void gst_video_editor_add_image_segment(IntPtr raw, string file_path, long start, long duration, IntPtr title);
+
+ public void AddImageSegment(string filePath, long start, long duration, string title) {
+ gst_video_editor_add_image_segment(Handle, filePath, start, duration, GLib.Marshaller.StringToPtrGStrdup(title));
+ }
[DllImport("libcesarplayer.dll")]
static extern void gst_video_editor_start(IntPtr raw);
diff --git a/LongoMatch.Services/Services/RenderingJobsManager.cs b/LongoMatch.Services/Services/RenderingJobsManager.cs
index 4398f51..fe5cf27 100644
--- a/LongoMatch.Services/Services/RenderingJobsManager.cs
+++ b/LongoMatch.Services/Services/RenderingJobsManager.cs
@@ -32,6 +32,7 @@ namespace LongoMatch.Services
/* List of pending jobs */
List<Job> jobs, pendingJobs;
IVideoEditor videoEditor;
+ IFramesCapturer capturer;
Job currentJob;
IRenderingStateBar stateBar;
IMultimediaToolkit multimediaToolkit;
@@ -42,6 +43,7 @@ namespace LongoMatch.Services
this.guiToolkit = guiToolkit;
this.multimediaToolkit = multimediaToolkit;
this.stateBar = guiToolkit.MainWindow.RenderingStateBar;
+ capturer = multimediaToolkit.GetFramesCapturer();
jobs = new List<Job>();
pendingJobs = new List<Job>();
stateBar.Cancel += (sender, e) => CancelCurrentJob();
@@ -126,16 +128,59 @@ namespace LongoMatch.Services
private void LoadJob(Job job) {
foreach(PlayListPlay segment in job.Playlist) {
- if(segment.Valid)
- videoEditor.AddSegment(segment.MediaFile.FilePath,
- segment.Start.MSeconds,
- segment.Duration.MSeconds,
- segment.Rate,
- segment.Name,
- segment.MediaFile.HasAudio);
+ if (!ProcessSegment(segment))
+ continue;
}
}
+ private bool ProcessSegment(PlayListPlay segment) {
+ if(!segment.Valid)
+ return false;
+
+ Log.Debug(String.Format("Adding segment with {0} drawings", segment.Drawings.Count));
+ if (segment.Drawings.Count >= 1) {
+ Drawing drawing = segment.Drawings[0];
+ string image_path = CreateStillImage(segment.MediaFile.FilePath, drawing);
+
+ videoEditor.AddSegment(segment.MediaFile.FilePath,
+ segment.Start.MSeconds,
+ drawing.RenderTime - segment.Start.MSeconds,
+ segment.Rate,
+ segment.Name,
+ segment.MediaFile.HasAudio);
+ videoEditor.AddImageSegment(image_path,
+ drawing.RenderTime,
+ drawing.PauseTime,
+ segment.Name);
+ videoEditor.AddSegment(segment.MediaFile.FilePath,
+ drawing.RenderTime,
+ segment.Stop.MSeconds - drawing.RenderTime,
+ segment.Rate,
+ segment.Name,
+ segment.MediaFile.HasAudio);
+ } else {
+ videoEditor.AddSegment(segment.MediaFile.FilePath,
+ segment.Start.MSeconds,
+ segment.Duration.MSeconds,
+ segment.Rate,
+ segment.Name,
+ segment.MediaFile.HasAudio);
+ }
+ return true;
+ }
+
+ private string CreateStillImage(string filename, Drawing drawing) {
+ Image frame, final_image;
+ string path = System.IO.Path.GetTempFileName();
+
+ capturer.Open(filename);
+ capturer.SeekTime(drawing.RenderTime, true);
+ frame = capturer.GetCurrentFrame();
+ final_image = Image.Composite(frame, drawing.Pixbuf);
+ final_image.Save(path);
+ return path;
+ }
+
private void CloseAndNext() {
RemoveCurrentFromPending();
UpdateJobsStatus();
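Using the same illustrative timings as the IVideoEditor sketch above, the split performed by ProcessSegment() works out as follows (the numbers are hypothetical and given in milliseconds):

    // A play from 10 000 ms to 20 000 ms with one drawing rendered at
    // 14 000 ms and paused for 5 000 ms becomes three consecutive segments.
    long start = 10000, stop = 20000;           // segment.Start / segment.Stop
    long renderTime = 14000, pauseTime = 5000;  // drawing.RenderTime / drawing.PauseTime
    long beforeStill = renderTime - start;      // 4 000 ms of the original clip
    long stillLength = pauseTime;               // 5 000 ms of the frozen, annotated frame
    long afterStill  = stop - renderTime;       // 6 000 ms of the remaining clip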
diff --git a/libcesarplayer/gst-video-editor.c b/libcesarplayer/gst-video-editor.c
index 297ea3d..7dea513 100644
--- a/libcesarplayer/gst-video-editor.c
+++ b/libcesarplayer/gst-video-editor.c
@@ -856,15 +856,17 @@ gst_video_editor_add_segment (GstVideoEditor * gve, gchar * file,
return;
}
- final_duration = GST_MSECOND * duration / rate;
+ start = GST_MSECOND * start;
+ duration = GST_MSECOND * duration;
+ final_duration = duration / rate;
/* Video */
filter = gst_caps_from_string ("video/x-raw-rgb;video/x-raw-yuv");
element_name = g_strdup_printf ("gnlvideofilesource%d", gve->priv->segments);
gnl_filesource = gst_element_factory_make ("gnlfilesource", element_name);
g_object_set (G_OBJECT (gnl_filesource), "location", file,
- "media-start", GST_MSECOND * start,
- "media-duration", GST_MSECOND * duration,
+ "media-start", start,
+ "media-duration", duration,
"start", gve->priv->duration,
"duration", final_duration, "caps", filter, NULL);
if (gve->priv->segments == 0) {
@@ -893,26 +895,107 @@ gst_video_editor_add_segment (GstVideoEditor * gve, gchar * file,
}
filter = gst_caps_from_string ("audio/x-raw-float;audio/x-raw-int");
g_object_set (G_OBJECT (gnl_filesource),
- "media-start", GST_MSECOND * start,
- "media-duration", GST_MSECOND * duration,
+ "media-start", start,
+ "media-duration", duration,
"start", gve->priv->duration,
"duration", final_duration, "caps", filter, NULL);
gst_bin_add (GST_BIN (gve->priv->gnl_audio_composition), gnl_filesource);
gve->priv->gnl_audio_filesources =
g_list_append (gve->priv->gnl_audio_filesources, gnl_filesource);
+ GST_INFO ("New segment: start={%" GST_TIME_FORMAT "} duration={%"
+ GST_TIME_FORMAT "} ", GST_TIME_ARGS (gve->priv->duration),
+ GST_TIME_ARGS (final_duration));
+
gve->priv->duration += final_duration;
gve->priv->segments++;
gve->priv->titles = g_list_append (gve->priv->titles, title);
gve->priv->stop_times[gve->priv->segments - 1] = gve->priv->duration;
- GST_INFO ("New segment: start={%" GST_TIME_FORMAT "} duration={%"
- GST_TIME_FORMAT "} ", GST_TIME_ARGS (start * GST_MSECOND),
- GST_TIME_ARGS (duration * GST_MSECOND));
g_free (element_name);
}
+
+void
+gst_video_editor_add_image_segment (GstVideoEditor * gve, gchar * file,
+ guint64 start, gint64 duration, gchar * title)
+{
+ GstState cur_state;
+ GstElement *gnl_filesource = NULL;
+ GstElement *imagesourcebin = NULL;
+ GstElement *filesource = NULL;
+ GstElement *decoder = NULL;
+ GstElement *colorspace = NULL;
+ GstElement *imagefreeze = NULL;
+ GstElement *audiotestsrc = NULL;
+ GstCaps *filter = NULL;
+ gchar *element_name = NULL;
+ gchar *desc = NULL;
+
+ g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));
+
+ gst_element_get_state (gve->priv->main_pipeline, &cur_state, NULL, 0);
+ if (cur_state > GST_STATE_READY) {
+ GST_WARNING ("Segments can only be added for a state <= GST_STATE_READY");
+ return;
+ }
+
+ duration = duration * GST_MSECOND;
+ start = start * GST_MSECOND;
+
+ /* Video */
+ /* gnlsource */
+ filter = gst_caps_from_string ("video/x-raw-rgb;video/x-raw-yuv");
+ element_name = g_strdup_printf ("gnlvideofilesource%d", gve->priv->segments);
+ gnl_filesource = gst_element_factory_make ("gnlsource", element_name);
+ g_object_set (G_OBJECT (gnl_filesource),
+ "media-start", start,
+ "media-duration", duration,
+ "start", gve->priv->duration,
+ "duration", duration, "caps", filter, NULL);
+ g_free(element_name);
+ /* filesrc ! pngdec ! ffmpegcolorspace ! imagefreeze */
+ desc = g_strdup_printf("filesrc location=%s ! pngdec ! videoscale ! ffmpegcolorspace ! video/x-raw-rgb, pixel-aspect-ratio=1/1 ! imagefreeze ", file);
+ imagesourcebin = gst_parse_bin_from_description(desc, TRUE, NULL);
+ g_free(desc);
+ gst_bin_add (GST_BIN (gnl_filesource), imagesourcebin);
+ gst_bin_add (GST_BIN (gve->priv->gnl_video_composition), gnl_filesource);
+ gve->priv->gnl_video_filesources =
+ g_list_append (gve->priv->gnl_video_filesources, gnl_filesource);
+
+ /* Audio */
+ element_name =
+ g_strdup_printf ("gnlaudiofakesource%d", gve->priv->segments);
+ gnl_filesource = gst_element_factory_make ("gnlsource", element_name);
+ g_free (element_name);
+ element_name = g_strdup_printf ("audiotestsource%d", gve->priv->segments);
+ audiotestsrc = gst_element_factory_make ("audiotestsrc", element_name);
+ g_free (element_name);
+ g_object_set (G_OBJECT (audiotestsrc), "volume", (double) 0, NULL);
+ gst_bin_add (GST_BIN (gnl_filesource), audiotestsrc);
+ filter = gst_caps_from_string ("audio/x-raw-float;audio/x-raw-int");
+ g_object_set (G_OBJECT (gnl_filesource),
+ "media-start", start,
+ "media-duration", duration,
+ "start", gve->priv->duration,
+ "duration", duration, "caps", filter, NULL);
+ gst_bin_add (GST_BIN (gve->priv->gnl_audio_composition), gnl_filesource);
+ gve->priv->gnl_audio_filesources =
+ g_list_append (gve->priv->gnl_audio_filesources, gnl_filesource);
+
+ GST_INFO ("New segment: start={%" GST_TIME_FORMAT "} duration={%"
+ GST_TIME_FORMAT "} ", GST_TIME_ARGS (gve->priv->duration),
+ GST_TIME_ARGS (duration));
+
+ gve->priv->duration += duration;
+ gve->priv->segments++;
+
+ gve->priv->titles = g_list_append (gve->priv->titles, title);
+ gve->priv->stop_times[gve->priv->segments - 1] = gve->priv->duration;
+
+}
+
void
gst_video_editor_clear_segments_list (GstVideoEditor * gve)
{
diff --git a/libcesarplayer/gst-video-editor.h b/libcesarplayer/gst-video-editor.h
index 85d7819..e8f32f9 100644
--- a/libcesarplayer/gst-video-editor.h
+++ b/libcesarplayer/gst-video-editor.h
@@ -32,6 +32,7 @@
#include <glib-object.h>
#include <gtk/gtk.h>
+#include <gdk/gdk.h>
#include "common.h"
@@ -62,24 +63,24 @@ struct _GstVideoEditor
GstVideoEditorPrivate *priv;
};
-EXPORT GType
-gst_video_editor_get_type (void)
- G_GNUC_CONST;
+EXPORT GType gst_video_editor_get_type (void) G_GNUC_CONST;
- EXPORT void gst_video_editor_init_backend (int *argc, char ***argv);
- EXPORT GstVideoEditor *gst_video_editor_new (GError ** err);
- EXPORT void gst_video_editor_start (GstVideoEditor * gve);
- EXPORT void gst_video_editor_cancel (GstVideoEditor * gve);
- EXPORT void gst_video_editor_set_video_encoder (GstVideoEditor * gve,
+
+EXPORT void gst_video_editor_init_backend (int *argc, char ***argv);
+EXPORT GstVideoEditor *gst_video_editor_new (GError ** err);
+EXPORT void gst_video_editor_start (GstVideoEditor * gve);
+EXPORT void gst_video_editor_cancel (GstVideoEditor * gve);
+EXPORT void gst_video_editor_set_video_encoder (GstVideoEditor * gve,
gchar ** err, VideoEncoderType codec);
- EXPORT void gst_video_editor_set_audio_encoder (GstVideoEditor * gve,
+EXPORT void gst_video_editor_set_audio_encoder (GstVideoEditor * gve,
gchar ** err, AudioEncoderType codec);
- EXPORT void gst_video_editor_set_video_muxer (GstVideoEditor * gve,
+EXPORT void gst_video_editor_set_video_muxer (GstVideoEditor * gve,
gchar ** err, VideoMuxerType codec);
- EXPORT void gst_video_editor_clear_segments_list (GstVideoEditor * gve);
- EXPORT void gst_video_editor_add_segment (GstVideoEditor * gve,
+EXPORT void gst_video_editor_clear_segments_list (GstVideoEditor * gve);
+EXPORT void gst_video_editor_add_segment (GstVideoEditor * gve,
gchar * file, gint64 start,
gint64 duration, gdouble rate, gchar * title, gboolean hasAudio);
-
+EXPORT void gst_video_editor_add_image_segment (GstVideoEditor * gve, gchar * file,
+ guint64 start, gint64 duration, gchar * title);
G_END_DECLS
#endif /* _GST_VIDEO_EDITOR_H_ */