[longomatch/camera_capturer: 3/3] WIP



commit 567af0ab74b29fd4edad9460c9214a6ec4824acf
Author: Andoni Morales Alastruey <ylatuya gmail com>
Date:   Wed Oct 3 02:22:43 2012 +0200

    WIP

 LongoMatch.Core/Common/CaptureSettings.cs          |    2 +-
 LongoMatch.Core/Common/Device.cs                   |   14 +-
 LongoMatch.Core/Common/Enums.cs                    |    9 +-
 .../Gui/Component/ProjectDetailsWidget.cs          |   17 +-
 .../gtk-gui/LongoMatch.Gui.MainWindow.cs           |    2 +-
 LongoMatch.GUI/gtk-gui/objects.xml                 |   34 ++--
 .../Capturer/GstCameraCapturer.cs                  |   30 ---
 LongoMatch.Multimedia/Utils/VideoDevice.cs         |   40 ++--
 configure.ac                                       |    2 +-
 libcesarplayer/gst-camera-capturer.c               |  240 +++++++++-----------
 10 files changed, 154 insertions(+), 236 deletions(-)
---
diff --git a/LongoMatch.Core/Common/CaptureSettings.cs b/LongoMatch.Core/Common/CaptureSettings.cs
index 1932c1f..4f5a7fd 100644
--- a/LongoMatch.Core/Common/CaptureSettings.cs
+++ b/LongoMatch.Core/Common/CaptureSettings.cs
@@ -29,7 +29,7 @@ namespace LongoMatch.Common
 
 		public static CaptureSettings DefaultSettings() {
 			CaptureSettings settings = new CaptureSettings();
-			settings.CaptureSourceType = CaptureSourceType.Raw;
+			settings.CaptureSourceType = CaptureSourceType.System;
 			settings.EncodingSettings = new EncodingSettings(VideoStandards.P480_4_3,
 			                                                 EncodingProfiles.MP4,
 			                                                 25, 1, 1000, 128, "", 20);
diff --git a/LongoMatch.Core/Common/Device.cs b/LongoMatch.Core/Common/Device.cs
index e27c303..e227865 100644
--- a/LongoMatch.Core/Common/Device.cs
+++ b/LongoMatch.Core/Common/Device.cs
@@ -32,9 +32,9 @@ namespace LongoMatch.Common
 		}
 
 		/// <summary>
-		/// Device Type among Video, Audio or DV (for dv cameras)
+		/// Capture source type
 		/// </summary>
-		public DeviceType DeviceType {
+		public CaptureSourceType DeviceType {
 			get;
 			set;
 		}
@@ -47,15 +47,5 @@ namespace LongoMatch.Common
 			get;
 			set;
 		}
-
-		/// <summary>
-		/// The name of the gstreamer element property used to set the device
-		/// </summary>
-
-		public string IDProperty {
-			get;
-			set;
-		}
-
 	}
 }
diff --git a/LongoMatch.Core/Common/Enums.cs b/LongoMatch.Core/Common/Enums.cs
index f0bb2b4..2ca7ca6 100644
--- a/LongoMatch.Core/Common/Enums.cs
+++ b/LongoMatch.Core/Common/Enums.cs
@@ -108,14 +108,7 @@ namespace LongoMatch.Common
 	public enum CaptureSourceType {
 		None,
 		DV,
-		Raw,
-		DShow
-	}
-	
-	public enum DeviceType {
-		Video,
-		Audio,
-		DV
+		System,
 	}
 	
 	public enum GameUnitEventType {
diff --git a/LongoMatch.GUI/Gui/Component/ProjectDetailsWidget.cs b/LongoMatch.GUI/Gui/Component/ProjectDetailsWidget.cs
index aae413d..6932b61 100644
--- a/LongoMatch.GUI/Gui/Component/ProjectDetailsWidget.cs
+++ b/LongoMatch.GUI/Gui/Component/ProjectDetailsWidget.cs
@@ -235,15 +235,7 @@ namespace LongoMatch.Gui.Component
 				encSettings.OutputFile = fileEntry.Text;
 				encSettings.AudioBitrate = (uint)audiobitratespinbutton.Value;
 				encSettings.VideoBitrate = (uint)videobitratespinbutton.Value;
-				if(videoDevices[devicecombobox.Active].DeviceType == DeviceType.DV) {
-					if(Environment.OSVersion.Platform == PlatformID.Win32NT)
-						s.CaptureSourceType = CaptureSourceType.DShow;
-					else
-						s.CaptureSourceType = CaptureSourceType.DV;
-				}
-				else {
-					s.CaptureSourceType = CaptureSourceType.Raw;
-				}
+				s.CaptureSourceType = videoDevices[devicecombobox.Active].DeviceType;
 				s.DeviceID = videoDevices[devicecombobox.Active].ID;
 				
 				/* Get size info */
@@ -354,12 +346,15 @@ namespace LongoMatch.Gui.Component
 				string deviceElement;
 				string deviceName;
 				if(Environment.OSVersion.Platform == PlatformID.Unix) {
-					if(device.DeviceType == DeviceType.DV)
+					if(device.DeviceType == CaptureSourceType.DV)
 						deviceElement = Catalog.GetString(DV_SOURCE);
 					else
 						deviceElement = Catalog.GetString(GCONF_SOURCE);
-				} else
+				} else if (Environment.OSVersion.Platform == PlatformID.Win32NT) {
 					deviceElement = Catalog.GetString("DirectShow Source");
+				} else {
+					deviceElement = Catalog.GetString("OS X Source");
+				}
 				deviceName = (device.ID == "") ? Catalog.GetString("Unknown"): device.ID;
 				devicecombobox.AppendText(deviceName + " ("+deviceElement+")");
 				devicecombobox.Active = 0;
diff --git a/LongoMatch.GUI/gtk-gui/LongoMatch.Gui.MainWindow.cs b/LongoMatch.GUI/gtk-gui/LongoMatch.Gui.MainWindow.cs
index 781fd51..761b72e 100644
--- a/LongoMatch.GUI/gtk-gui/LongoMatch.Gui.MainWindow.cs
+++ b/LongoMatch.GUI/gtk-gui/LongoMatch.Gui.MainWindow.cs
@@ -140,7 +140,7 @@ namespace LongoMatch.Gui
 			this.ManualTaggingViewAction.ShortLabel = global::Mono.Unix.Catalog.GetString ("Free Capture Mode");
 			w1.Add (this.ManualTaggingViewAction, "<Control>f");
 			this.GameUnitsViewAction = new global::Gtk.RadioAction ("GameUnitsViewAction", global::Mono.Unix.Catalog.GetString ("Game units view"), null, null, 0);
-			this.GameUnitsViewAction.Group = this.TaggingViewAction.Group;
+			this.GameUnitsViewAction.Group = this.ManualTaggingViewAction.Group;
 			this.GameUnitsViewAction.Sensitive = false;
 			this.GameUnitsViewAction.ShortLabel = global::Mono.Unix.Catalog.GetString ("Game units view");
 			w1.Add (this.GameUnitsViewAction, null);
diff --git a/LongoMatch.GUI/gtk-gui/objects.xml b/LongoMatch.GUI/gtk-gui/objects.xml
index f943be7..0d24ee1 100644
--- a/LongoMatch.GUI/gtk-gui/objects.xml
+++ b/LongoMatch.GUI/gtk-gui/objects.xml
@@ -244,23 +244,6 @@
       </itemgroup>
     </signals>
   </object>
-  <object type="LongoMatch.Gui.Component.ProjectDetailsWidget" palette-category="LongoMatch" allow-children="false" base-type="Gtk.Bin">
-    <itemgroups>
-      <itemgroup label="ProjectDetailsWidget Properties">
-        <property name="Edited" />
-        <property name="Season" />
-        <property name="Competition" />
-        <property name="LocalGoals" />
-        <property name="VisitorGoals" />
-        <property name="Date" />
-      </itemgroup>
-    </itemgroups>
-    <signals>
-      <itemgroup label="ProjectDetailsWidget Signals">
-        <signal name="EditedEvent" />
-      </itemgroup>
-    </signals>
-  </object>
   <object type="LongoMatch.Gui.Component.PlayersFilterTreeView" palette-category="LongoMatch" allow-children="false" base-type="Gtk.TreeView">
     <itemgroups />
     <signals />
@@ -278,4 +261,21 @@
       </itemgroup>
     </signals>
   </object>
+  <object type="LongoMatch.Gui.Component.ProjectDetailsWidget" palette-category="LongoMatch" allow-children="false" base-type="Gtk.Bin">
+    <itemgroups>
+      <itemgroup label="ProjectDetailsWidget Properties">
+        <property name="Edited" />
+        <property name="Season" />
+        <property name="Competition" />
+        <property name="LocalGoals" />
+        <property name="VisitorGoals" />
+        <property name="Date" />
+      </itemgroup>
+    </itemgroups>
+    <signals>
+      <itemgroup label="ProjectDetailsWidget Signals">
+        <signal name="EditedEvent" />
+      </itemgroup>
+    </signals>
+  </object>
 </objects>
\ No newline at end of file
diff --git a/LongoMatch.Multimedia/Capturer/GstCameraCapturer.cs b/LongoMatch.Multimedia/Capturer/GstCameraCapturer.cs
index 2e29521..d41dfe5 100644
--- a/LongoMatch.Multimedia/Capturer/GstCameraCapturer.cs
+++ b/LongoMatch.Multimedia/Capturer/GstCameraCapturer.cs
@@ -491,35 +491,5 @@ namespace LongoMatch.Video.Capturer {
 		}
 		#endregion
 		
-		static public List<Device> ListVideoDevices() {
-			List<Device> devicesList  = new List<Device>();
-
-			/* Generate the list of devices and add the gconf one at the bottom
-			 * so that DV sources are always selected before, at least on Linux,
-			 * since on Windows both raw an dv sources are listed from the same
-			 * source element (dshowvideosrc) */
-			foreach(string devName in GstCameraCapturer.VideoDevices) {
-				string idProp;
-
-				if(Environment.OSVersion.Platform == PlatformID.Unix)
-					idProp = VideoConstants.DV1394SRC_PROP;
-				else
-					idProp = VideoConstants.DSHOWVIDEOSINK_PROP;
-
-				devicesList.Add(new Device {
-					ID = devName,
-					IDProperty = idProp,
-					DeviceType = DeviceType.DV
-				});
-			}
-			if(Environment.OSVersion.Platform == PlatformID.Unix) {
-				devicesList.Add(new Device {
-					ID = Catalog.GetString("Default device"),
-					IDProperty = "",
-					DeviceType = DeviceType.Video
-				});
-			}
-			return devicesList;
-		}
 	}
 }
diff --git a/LongoMatch.Multimedia/Utils/VideoDevice.cs b/LongoMatch.Multimedia/Utils/VideoDevice.cs
index e6ada1c..5a208f8 100644
--- a/LongoMatch.Multimedia/Utils/VideoDevice.cs
+++ b/LongoMatch.Multimedia/Utils/VideoDevice.cs
@@ -28,35 +28,33 @@ namespace LongoMatch.Multimedia.Utils
 	public class VideoDevice
 	{
 		
-		static public List<Device> ListVideoDevices (){
+		static public List<Device> ListVideoDevices() {
 			List<Device> devicesList  = new List<Device>();
-			
-			/* Generate the list of devices and add the gconf one at the bottom
-			 * so that DV sources are always selected before, at least on Linux, 
-			 * since on Windows both raw an dv sources are listed from the same
-			 * source element (dshowvideosrc) */
-			foreach (string devName in GstCameraCapturer.VideoDevices){
-				string idProp;
-				
-				if (Environment.OSVersion.Platform == PlatformID.Unix)
-					idProp = VideoConstants.DV1394SRC_PROP;
-				else 
-					idProp = VideoConstants.DSHOWVIDEOSINK_PROP;
-				
+
+			/* Generate the list of devices for the supported platforms,
+			 * an extra DV device for the dv1394src element and the default
+			 * OS source for all of them */
+			foreach(string devName in GstCameraCapturer.VideoDevices) {
+				CaptureSourceType source;
+
+				if(Environment.OSVersion.Platform == PlatformID.Unix)
+					source = CaptureSourceType.DV;
+				else
+					source = CaptureSourceType.System;
+
 				devicesList.Add(new Device {
 					ID = devName,
-					IDProperty = idProp,
-					DeviceType = DeviceType.DV});
+					DeviceType = source,
+				});
 			}
-			if (Environment.OSVersion.Platform == PlatformID.Unix){
+			if(Environment.OSVersion.Platform != PlatformID.Win32NT) {
 				devicesList.Add(new Device {
 					ID = Catalog.GetString("Default device"),
-					IDProperty = "",
-					DeviceType = DeviceType.Video});
-			}			
+					DeviceType = CaptureSourceType.System
+				});
+			}
 			return devicesList;
 		}
-
 	}
 }
 
diff --git a/configure.ac b/configure.ac
index 45c559d..830926a 100644
--- a/configure.ac
+++ b/configure.ac
@@ -81,7 +81,7 @@ PKG_CHECK_MODULES([MONO_ADDINS], [mono-addins])
 AC_SUBST(MONO_ADDINS_LIBS)
 
 dnl package checks for libcesarplayer
-PKG_CHECK_MODULES(CESARPLAYER, [gtk+-2.0 >= 2.8 gdk-2.0 gio-2.0 glib-2.0 gstreamer-0.10 gstreamer-audio-0.10 gstreamer-video-0.10 gstreamer-pbutils-0.10 gobject-2.0 gstreamer-interfaces-0.10 gstreamer-tag-0.10])
+PKG_CHECK_MODULES(CESARPLAYER, [gtk+-2.0 >= 2.8 gdk-2.0 gio-2.0 glib-2.0 gstreamer-0.10 gstreamer-audio-0.10 gstreamer-video-0.10 gstreamer-pbutils-0.10 gobject-2.0 gstreamer-interfaces-0.10 gstreamer-tag-0.10 gstreamer-app-0.10])
 AC_SUBST(CESARPLAYER_CFLAGS)
 AC_SUBST(CESARPLAYER_LIBS)
 
diff --git a/libcesarplayer/gst-camera-capturer.c b/libcesarplayer/gst-camera-capturer.c
index 5d07bed..0b2c0d5 100644
--- a/libcesarplayer/gst-camera-capturer.c
+++ b/libcesarplayer/gst-camera-capturer.c
@@ -25,6 +25,7 @@
 #include <string.h>
 #include <stdio.h>
 
+#include <gst/app/gstappsrc.h>
 #include <gst/interfaces/xoverlay.h>
 #include <gst/interfaces/propertyprobe.h>
 #include <gst/gst.h>
@@ -116,6 +117,8 @@ struct GstCameraCapturerPrivate
   GstElement *audio_enc;
   GstElement *muxer;
   GstElement *filesink;
+  GstElement* video_appsrc;
+  GstElement* audio_appsrc;
   const gchar *source_element_name;
 
   /* Recording */
@@ -1157,172 +1160,144 @@ gst_camera_capturer_prepare_mpegts_source (GstCameraCapturer *gcc)
 
 static gboolean
 gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
-    GstMiniObject *data, gboolean is_video)
+    GstBuffer *prev_buf, gboolean is_video)
 {
-  GstClockTime buf_ts, new_buf_ts;
-  gboolean ret = FALSE;
+  GstClockTime buf_ts, new_buf_ts, duration;
+  GstBuffer *enc_buf;
 
   g_mutex_lock(gcc->priv->recording_lock);
 
-  /* Event handling, forward everything except new segment events since we are
-   * encoding and we need continuous timestamps on segment starting from 0 */
-  if (GST_IS_EVENT(data)) {
-    GstEvent *event = (GstEvent*) data;
+  if (!gcc->priv->is_recording) {
+    /* Drop buffers if we are not recording */
+    GST_LOG_OBJECT (gcc, "Dropping buffer on %s pad", is_video ? "video": "audio");
+    goto done;
+  }
 
-    if (!gcc->priv->is_recording && !gcc->priv->closing_recording) {
-      GST_LOG_OBJECT (gcc, "Dropping event on %s pad", is_video ? "video": "audio");
-      ret = FALSE;
+  /* If we are just remuxing, drop everything until we see a keyframe */
+  if (gcc->priv->video_needs_keyframe_sync && !gcc->priv->video_synced) {
+    if (is_video && !GST_BUFFER_FLAG_IS_SET(prev_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+      gcc->priv->video_synced = TRUE;
+    } else {
+      GST_LOG_OBJECT (gcc, "Waiting for a keyframe, "
+          "dropping buffer on %s pad", is_video ? "video": "audio");
       goto done;
     }
+  }
 
-    GST_DEBUG_OBJECT (gcc, "Received new event on the %s pad", is_video ? "video": "audio");
-    if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
-      GST_DEBUG_OBJECT (gcc, "Dropping NEWSEGMENT event");
-      ret = FALSE;
-    } else {
-      GST_DEBUG_OBJECT (gcc, "Forwarding event");
-      ret = TRUE;
-    }
-  } else {
-    GstBuffer *buf = (GstBuffer*) data;
+  enc_buf = gst_buffer_create_sub (prev_buf, 0, GST_BUFFER_SIZE(prev_buf));
+  buf_ts = GST_BUFFER_TIMESTAMP (prev_buf);
+  duration = GST_BUFFER_DURATION (prev_buf);
+  if (duration == GST_CLOCK_TIME_NONE)
+    duration = 0;
 
-    if (!gcc->priv->is_recording) {
-      /* Drop buffers if we are not recording */
-      GST_LOG_OBJECT (gcc, "Dropping buffer on %s pad", is_video ? "video": "audio");
-      ret = FALSE;
-    } else {
-      GstClockTime duration;
-
-      /* If we are just remuxing, drop everything until we see a keyframe */
-      if (gcc->priv->video_needs_keyframe_sync && !gcc->priv->video_synced) {
-        if (is_video && !GST_BUFFER_FLAG_IS_SET(buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
-          gcc->priv->video_synced = TRUE;
-        } else {
-          GST_LOG_OBJECT (gcc, "Waiting for a keyframe, "
-              "dropping buffer on %s pad", is_video ? "video": "audio");
-          ret = FALSE;
-          goto done;
-        }
-      }
+  /* Check if it's the first buffer after starting or restarting the capture
+   * and update the timestamps accordingly */
+  if (G_UNLIKELY(gcc->priv->current_recording_start_ts == GST_CLOCK_TIME_NONE)) {
+    gcc->priv->current_recording_start_ts = buf_ts;
+    gcc->priv->last_accum_recorded_ts = gcc->priv->accum_recorded_ts;
+    GST_INFO_OBJECT (gcc, "Starting recording at %" GST_TIME_FORMAT,
+        GST_TIME_ARGS(gcc->priv->last_accum_recorded_ts));
+  }
 
-      buf_ts = GST_BUFFER_TIMESTAMP(buf);
-      duration = GST_BUFFER_DURATION(buf);
-      if (duration == GST_CLOCK_TIME_NONE)
-        duration = 0;
-
-      /* Check if it's the first buffer after starting or restarting the capture
-       * and update the timestamps accordingly */
-      if (G_UNLIKELY(gcc->priv->current_recording_start_ts == GST_CLOCK_TIME_NONE)) {
-        gcc->priv->current_recording_start_ts = buf_ts;
-        gcc->priv->last_accum_recorded_ts = gcc->priv->accum_recorded_ts;
-        GST_INFO_OBJECT (gcc, "Starting recording at %" GST_TIME_FORMAT,
-            GST_TIME_ARGS(gcc->priv->last_accum_recorded_ts));
-      }
+  /* Clip buffers that are not in the segment */
+  if (buf_ts < gcc->priv->current_recording_start_ts) {
+    GST_WARNING_OBJECT (gcc, "Discarding buffer out of segment");
+    goto done;
+  }
 
-      /* Clip buffers that are not in the segment */
-      if (buf_ts < gcc->priv->current_recording_start_ts) {
-        GST_WARNING_OBJECT (gcc, "Discarding buffer out of segment");
-        ret = FALSE;
-        goto done;
-      }
+  if (buf_ts != GST_CLOCK_TIME_NONE) {
+    /* Get the buffer timestamp with respect to the encoding time and not
+     * the playing time for a continuous stream in the encoder's input */
+    new_buf_ts = buf_ts - gcc->priv->current_recording_start_ts + gcc->priv->last_accum_recorded_ts;
 
-      if (buf_ts != GST_CLOCK_TIME_NONE) {
-        /* Get the buffer timestamp with respect of the encoding time and not
-         * the playing time for a continous stream in the encoders input */
-        new_buf_ts = buf_ts - gcc->priv->current_recording_start_ts + gcc->priv->last_accum_recorded_ts;
+    /* Store the last timestamp seen on this pad */
+    if (is_video)
+      gcc->priv->last_video_buf_ts = new_buf_ts;
+    else
+      gcc->priv->last_audio_buf_ts = new_buf_ts;
 
-        /* Store the last timestamp seen on this pad */
-        if (is_video)
-          gcc->priv->last_video_buf_ts = new_buf_ts;
-        else
-          gcc->priv->last_audio_buf_ts = new_buf_ts;
+    /* Update the highest encoded timestamp */
+    if (new_buf_ts + duration > gcc->priv->accum_recorded_ts)
+      gcc->priv->accum_recorded_ts = new_buf_ts + duration;
+  } else {
+    /* h264parse only sets the timestamp on the first buffer if a frame is
+     * split into several buffers. Other parsers might do the same. We only set
+     * the last timestamp seen on the pad */
+    if (is_video)
+      new_buf_ts = gcc->priv->last_video_buf_ts;
+    else
+      new_buf_ts = gcc->priv->last_audio_buf_ts;
+  }
 
-        /* Update the highest encoded timestamp */
-        if (new_buf_ts + duration > gcc->priv->accum_recorded_ts)
-          gcc->priv->accum_recorded_ts = new_buf_ts + duration;
-      } else {
-        /* h264parse only sets the timestamp on the first buffer if a frame is
-         * split in several ones. Other parsers might do the same. We only set
-         * the last timestamp seen on the pad */
-        if (is_video)
-          new_buf_ts = gcc->priv->last_video_buf_ts;
-        else
-          new_buf_ts = gcc->priv->last_audio_buf_ts;
-      }
+  GST_BUFFER_TIMESTAMP (enc_buf) = new_buf_ts;
 
-      /* We don't want to overwrite timestamps of the preview branch */
-      buf = gst_buffer_make_metadata_writable(buf);
-      GST_BUFFER_TIMESTAMP (buf) = new_buf_ts;
-      data = (GstMiniObject*) buf;
+  GST_LOG_OBJECT(gcc, "Pushing %s frame to the encoder in ts: %" GST_TIME_FORMAT
+      " out ts: %" GST_TIME_FORMAT, is_video ? "video": "audio",
+      GST_TIME_ARGS(buf_ts), GST_TIME_ARGS(new_buf_ts));
 
-      GST_LOG_OBJECT(gcc, "Pushing %s frame to the encoder in ts:% " GST_TIME_FORMAT
-          " out ts: %" GST_TIME_FORMAT, is_video ? "video": "audio",
-          GST_TIME_ARGS(buf_ts), GST_TIME_ARGS(new_buf_ts));
-      ret = TRUE;
-    }
-  }
+  if (is_video)
+    gst_app_src_push_buffer(GST_APP_SRC(gcc->priv->video_appsrc), enc_buf);
+  else
+    gst_app_src_push_buffer(GST_APP_SRC(gcc->priv->audio_appsrc), enc_buf);
 
 done:
   {
     g_mutex_unlock(gcc->priv->recording_lock);
-    return ret;
+    return TRUE;
   }
 }
 
 static gboolean
-gst_camera_capturer_audio_encoding_probe (GstPad *pad, GstMiniObject *data,
+gst_camera_capturer_audio_encoding_probe (GstPad *pad, GstBuffer *buf,
     GstCameraCapturer *gcc)
 {
-  return gst_camera_capturer_encoding_retimestamper(gcc, data, FALSE);
+  return gst_camera_capturer_encoding_retimestamper(gcc, buf, FALSE);
 }
 
 static gboolean
-gst_camera_capturer_video_encoding_probe (GstPad *pad, GstMiniObject *data,
+gst_camera_capturer_video_encoding_probe (GstPad *pad, GstBuffer *buf,
     GstCameraCapturer *gcc)
 {
-  return gst_camera_capturer_encoding_retimestamper(gcc, data, TRUE);
+  return gst_camera_capturer_encoding_retimestamper(gcc, buf, TRUE);
 }
 
 static void
 gst_camera_capturer_create_decoder_bin (GstCameraCapturer *gcc, GstElement *decoder_bin)
 {
-  /*    decoder --> atee --> audio_queue
-   *            |        |
-   *            |        --> audio_preview_queue
+  /*    decoder --> video_preview_queue
    *            |
-   *            --> vtee --> video_queue
-   *                     \
-   *                     --> video_preview_queue
+   *            --> audio_preview_queue
+   *
+   *            video_appsrc   --> video_queue
+   *            audio_appsrc   --> audio_queue
    */
 
-  GstElement *video_tee, *v_queue, *v_prev_queue;
-  GstPad *v_dec_pad, *v_tee_pad;
-  GstPad *v_queue_pad, *v_prev_queue_pad;
+  GstElement *v_queue, *v_prev_queue;
+  GstPad *v_dec_pad, *v_queue_pad, *v_prev_queue_pad;
   GstPad *dec_sink_pad;
 
   GST_INFO_OBJECT(gcc, "Creating decoder bin");
   /* Create elements */
   gcc->priv->decoder_bin = gst_bin_new("decoder_bin");
-  video_tee = gst_element_factory_make("tee", NULL);
   v_queue = gst_element_factory_make("queue2", "video-queue");
+  gcc->priv->video_appsrc = gst_element_factory_make("appsrc", "video-appsrc");
   v_prev_queue = gst_element_factory_make("queue2", "video-preview-queue");
 
   g_object_set(v_queue, "max-size-time", 1 * GST_SECOND, NULL);
   g_object_set(v_prev_queue, "max-size-bytes", 0,  NULL);
 
-  gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), decoder_bin, video_tee, v_queue,
-      v_prev_queue, NULL);
+  gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), decoder_bin, v_queue,
+      gcc->priv->video_appsrc, v_prev_queue, NULL);
 
-  /* Link tees to queues */
-  gst_element_link(video_tee, v_prev_queue);
-  gst_element_link(video_tee, v_queue);
-
-  /* link decoder to the tees */
+  /* link decoder to the preview-queue */
   v_dec_pad = gst_element_get_static_pad(decoder_bin, "video");
-  v_tee_pad = gst_element_get_static_pad(video_tee, "sink");
-  gst_pad_link(v_dec_pad, v_tee_pad);
+  v_prev_queue_pad = gst_element_get_static_pad(v_prev_queue, "sink");
+  gst_pad_link(v_dec_pad, v_prev_queue_pad);
   gst_object_unref(v_dec_pad);
-  gst_object_unref(v_tee_pad);
+  gst_object_unref(v_prev_queue_pad);
+
+  /* Link appsrc */
+  gst_element_link (gcc->priv->video_appsrc, v_queue);
 
   /* Create ghost pads */
   v_queue_pad = gst_element_get_static_pad(v_queue, "src");
@@ -1336,35 +1311,33 @@ gst_camera_capturer_create_decoder_bin (GstCameraCapturer *gcc, GstElement *deco
   gst_object_unref(dec_sink_pad);
 
   /* Add pad probes for the encoding branch */
-  v_queue_pad = gst_element_get_static_pad(v_queue, "sink");
-  gst_pad_add_data_probe(v_queue_pad, (GCallback) gst_camera_capturer_video_encoding_probe, gcc);
-  gst_object_unref(v_queue_pad);
+  v_prev_queue_pad = gst_element_get_static_pad(v_prev_queue, "src");
+  gst_pad_add_buffer_probe(v_prev_queue_pad, (GCallback) gst_camera_capturer_video_encoding_probe, gcc);
+  gst_object_unref(v_prev_queue_pad);
 
   if (gcc->priv->audio_enabled) {
-    GstElement *audio_tee, *a_queue, *a_prev_queue;
-    GstPad *a_dec_pad, *a_tee_pad;
-    GstPad *a_queue_pad, *a_prev_queue_pad;
+    GstElement *a_queue, *a_prev_queue;
+    GstPad *a_dec_pad, *a_queue_pad, *a_prev_queue_pad;
 
     /* Create elements */
-    audio_tee = gst_element_factory_make("tee", NULL);
+    gcc->priv->audio_appsrc = gst_element_factory_make("appsrc", "audio-appsrc");
     a_queue = gst_element_factory_make("queue2", "audio-queue");
     a_prev_queue = gst_element_factory_make("queue2", "audio-preview-queue");
 
     g_object_set(a_queue, "max-size-time", 1 * GST_SECOND,  NULL);
 
-    gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), audio_tee, a_queue,
+    gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), gcc->priv->audio_appsrc, a_queue,
         a_prev_queue, NULL);
 
-    /* Link tees to queues */
-    gst_element_link(audio_tee, a_prev_queue);
-    gst_element_link(audio_tee, a_queue);
+    /* Link appsrc to the queue */
+    gst_element_link(gcc->priv->audio_appsrc, a_queue);
 
-    /* link decoder to the tees */
+    /* link decoder to the queue */
     a_dec_pad = gst_element_get_static_pad(decoder_bin, "audio");
-    a_tee_pad = gst_element_get_static_pad(audio_tee, "sink");
-    gst_pad_link(a_dec_pad, a_tee_pad);
+    a_prev_queue_pad = gst_element_get_static_pad(a_prev_queue, "sink");
+    gst_pad_link(a_dec_pad, a_prev_queue_pad);
     gst_object_unref(a_dec_pad);
-    gst_object_unref(a_tee_pad);
+    gst_object_unref(a_prev_queue_pad);
 
     /* Create ghost pads */
     a_queue_pad = gst_element_get_static_pad(a_queue, "src");
@@ -1375,9 +1348,9 @@ gst_camera_capturer_create_decoder_bin (GstCameraCapturer *gcc, GstElement *deco
     gst_object_unref(a_prev_queue_pad);
 
     /* Add pad probes for the encoding branch */
-    a_queue_pad = gst_element_get_static_pad(a_queue, "sink");
-    gst_pad_add_data_probe(a_queue_pad, (GCallback) gst_camera_capturer_audio_encoding_probe, gcc);
-    gst_object_unref(a_queue_pad);
+    a_prev_queue_pad = gst_element_get_static_pad(a_prev_queue, "src");
+    gst_pad_add_buffer_probe(a_prev_queue_pad, (GCallback) gst_camera_capturer_audio_encoding_probe, gcc);
+    gst_object_unref(a_prev_queue_pad);
   }
 }
 
@@ -2295,8 +2268,6 @@ gst_camera_capturer_get_current_frame (GstCameraCapturer * gcc)
 void
 gst_camera_capturer_stop (GstCameraCapturer * gcc)
 {
-  GstPad *video_pad;
-
   g_return_if_fail (gcc != NULL);
   g_return_if_fail (GST_IS_CAMERA_CAPTURER (gcc));
 
@@ -2311,6 +2282,7 @@ gst_camera_capturer_stop (GstCameraCapturer * gcc)
   GST_INFO_OBJECT(gcc, "Closing capture");
   g_mutex_lock(gcc->priv->recording_lock);
   gcc->priv->closing_recording = TRUE;
+  gcc->priv->is_recording = FALSE;
   g_mutex_unlock(gcc->priv->recording_lock);
 
   gcc_encoder_send_event(gcc, gst_event_new_eos());



[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]