[longomatch] Fix indentation with gst-indent
- From: Andoni Morales Alastruey <amorales src gnome org>
- To: commits-list gnome org
- Subject: [longomatch] Fix indentation with gst-indent
- Date: Wed, 5 Nov 2014 19:02:32 +0000 (UTC)
commit 117fbca080c6aad458a41efc928e12c364b9da7d
Author: Andoni Morales Alastruey <ylatuya gmail com>
Date: Mon Nov 3 11:31:01 2014 +0100
Fix indentation with gst-indent
 libcesarplayer/gst-camera-capturer.c | 510 ++++++++++++++++++----------------
 libcesarplayer/gst-nle-source.c      |  23 +-
 libcesarplayer/gst-remuxer.c         |  82 +++---
 libcesarplayer/gst-video-encoder.c   | 115 ++++----
 libcesarplayer/lgm-video-player.c    |  72 +++---
 libcesarplayer/main.c                |   2 +-
 libcesarplayer/test-capturer.c       |  23 +-
 libcesarplayer/test-discoverer.c     |   2 +-
 libcesarplayer/test-editor.c         |  42 ++--
 libcesarplayer/test-remuxer.c        |  19 +-
 libcesarplayer/video-utils.m         |  96 ++++---
11 files changed, 518 insertions(+), 468 deletions(-)
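
For context, gst-indent is the GNU indent wrapper that GStreamer projects use to enforce their C coding style, and every hunk below is a mechanical application of that style: a space before each argument list, pointer parameters written as "Type * name" in function signatures, 2-space indentation, and 80-column wrapping with indented continuations. The short standalone sketch that follows is illustrative only and not part of the commit; the set_location helper and the /tmp output path are invented for the example.

#include <gst/gst.h>

/* Before gst-indent, calls in this tree were often written as
 *   sink = gst_element_factory_make("filesink", NULL);
 *   g_object_set(sink, "location", path, NULL);
 * After gst-indent there is a space before every argument list, and
 * pointer parameters in signatures become "Type * name".
 */
static void
set_location (GstElement * element, const gchar * path)
{
  g_object_set (element, "location", path, NULL);
}

int
main (int argc, char *argv[])
{
  GstElement *sink;

  gst_init (&argc, &argv);
  sink = gst_element_factory_make ("filesink", NULL);
  if (sink == NULL)
    return 1;
  set_location (sink, "/tmp/out.mp4");
  gst_object_unref (sink);
  return 0;
}
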
---
diff --git a/libcesarplayer/gst-camera-capturer.c b/libcesarplayer/gst-camera-capturer.c
index 0f4746b..a232fe9 100644
--- a/libcesarplayer/gst-camera-capturer.c
+++ b/libcesarplayer/gst-camera-capturer.c
@@ -89,8 +89,8 @@ struct GstCameraCapturerPrivate
GstElement *audio_enc;
GstElement *muxer;
GstElement *filesink;
- GstElement* video_appsrc;
- GstElement* audio_appsrc;
+ GstElement *video_appsrc;
+ GstElement *audio_appsrc;
/* Recording */
gboolean is_recording;
@@ -125,7 +125,7 @@ static void gcc_bus_message_cb (GstBus * bus, GstMessage * message,
gpointer data);
static void gcc_element_msg_sync (GstBus * bus, GstMessage * msg,
gpointer data);
-static gboolean gcc_get_video_stream_info (GstPad *pad, GstPad *peer,
+static gboolean gcc_get_video_stream_info (GstPad * pad, GstPad * peer,
GstCameraCapturer * gcc);
G_DEFINE_TYPE (GstCameraCapturer, gst_camera_capturer, G_TYPE_OBJECT);
@@ -277,7 +277,7 @@ gst_camera_capturer_error_quark (void)
}
static void
-gst_camera_capturer_update_device_id (GstCameraCapturer *gcc)
+gst_camera_capturer_update_device_id (GstCameraCapturer * gcc)
{
const gchar *prop_name;
@@ -293,17 +293,17 @@ gst_camera_capturer_update_device_id (GstCameraCapturer *gcc)
prop_name = "device-name";
if (prop_name)
- g_object_set(gcc->priv->source, prop_name, gcc->priv->device_id, NULL);
+ g_object_set (gcc->priv->source, prop_name, gcc->priv->device_id, NULL);
}
static void
-cb_new_pad (GstElement * element, GstPad * pad, GstCameraCapturer *gcc)
+cb_new_pad (GstElement * element, GstPad * pad, GstCameraCapturer * gcc)
{
GstCaps *caps;
const gchar *mime;
GstElement *sink = NULL;
GstPad *epad;
- GstBin *bin = GST_BIN(gcc->priv->source_decoder_bin);
+ GstBin *bin = GST_BIN (gcc->priv->source_decoder_bin);
caps = gst_pad_get_caps_reffed (pad);
mime = gst_structure_get_name (gst_caps_get_structure (caps, 0));
@@ -315,16 +315,16 @@ cb_new_pad (GstElement * element, GstPad * pad, GstCameraCapturer *gcc)
}
if (sink != NULL) {
- epad = gst_element_get_static_pad(sink, "sink");
+ epad = gst_element_get_static_pad (sink, "sink");
gst_pad_link (pad, epad);
- gst_object_unref(epad);
+ gst_object_unref (epad);
gst_object_unref (sink);
}
- gst_caps_unref(caps);
+ gst_caps_unref (caps);
}
static void
-gst_camera_capturer_create_encoder_bin (GstCameraCapturer *gcc)
+gst_camera_capturer_create_encoder_bin (GstCameraCapturer * gcc)
{
GstElement *colorspace, *videoscale, *videorate;
GstCaps *caps;
@@ -333,11 +333,11 @@ gst_camera_capturer_create_encoder_bin (GstCameraCapturer *gcc)
GST_INFO_OBJECT (gcc, "Creating encoder bin");
gcc->priv->encoder_bin = gst_bin_new ("encoder_bin");
- colorspace = gst_element_factory_make("ffmpegcolorspace", NULL);
- videoscale = gst_element_factory_make("videoscale", NULL);
- videorate = gst_element_factory_make("videorate", NULL);
- gcc->priv->video_filter = gst_element_factory_make("capsfilter", NULL);
- gcc->priv->filesink = gst_element_factory_make("filesink", NULL);
+ colorspace = gst_element_factory_make ("ffmpegcolorspace", NULL);
+ videoscale = gst_element_factory_make ("videoscale", NULL);
+ videorate = gst_element_factory_make ("videorate", NULL);
+ gcc->priv->video_filter = gst_element_factory_make ("capsfilter", NULL);
+ gcc->priv->filesink = gst_element_factory_make ("filesink", NULL);
/* Set caps for the encoding resolution */
caps = gst_caps_new_simple ("video/x-raw-yuv", "framerate",
@@ -350,39 +350,40 @@ gst_camera_capturer_create_encoder_bin (GstCameraCapturer *gcc)
gst_caps_set_simple (caps, "height", G_TYPE_INT, gcc->priv->output_height,
NULL);
}
- g_object_set(gcc->priv->video_filter, "caps", caps, NULL);
+ g_object_set (gcc->priv->video_filter, "caps", caps, NULL);
- gst_bin_add_many(GST_BIN(gcc->priv->encoder_bin), videoscale,
+ gst_bin_add_many (GST_BIN (gcc->priv->encoder_bin), videoscale,
colorspace, videorate, gcc->priv->video_filter, gcc->priv->video_enc,
gcc->priv->muxer, gcc->priv->filesink, NULL);
- gst_element_link_many(videoscale, colorspace, videorate, gcc->priv->video_filter,
- gcc->priv->video_enc, gcc->priv->muxer, NULL);
- gst_element_link(gcc->priv->muxer, gcc->priv->filesink);
+ gst_element_link_many (videoscale, colorspace, videorate,
+ gcc->priv->video_filter, gcc->priv->video_enc, gcc->priv->muxer, NULL);
+ gst_element_link (gcc->priv->muxer, gcc->priv->filesink);
g_object_set (gcc->priv->filesink, "location", gcc->priv->output_file, NULL);
/* Create ghost pads */
v_sink_pad = gst_element_get_static_pad (videoscale, "sink");
- gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("video", v_sink_pad));
+ gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("video",
+ v_sink_pad));
gst_object_unref (GST_OBJECT (v_sink_pad));
- if (gcc->priv->audio_enabled)
- {
+ if (gcc->priv->audio_enabled) {
GstElement *audioconvert, *audioresample;
GstPad *a_sink_pad;
- audioconvert = gst_element_factory_make("audioconvert", NULL);
- audioresample = gst_element_factory_make("audioresample", NULL);
+ audioconvert = gst_element_factory_make ("audioconvert", NULL);
+ audioresample = gst_element_factory_make ("audioresample", NULL);
- gst_bin_add_many(GST_BIN(gcc->priv->encoder_bin), audioconvert, audioresample,
- audioresample, gcc->priv->audio_enc, NULL);
+ gst_bin_add_many (GST_BIN (gcc->priv->encoder_bin), audioconvert,
+ audioresample, audioresample, gcc->priv->audio_enc, NULL);
- gst_element_link_many(audioconvert, audioresample, gcc->priv->audio_enc,
+ gst_element_link_many (audioconvert, audioresample, gcc->priv->audio_enc,
gcc->priv->muxer, NULL);
a_sink_pad = gst_element_get_static_pad (audioconvert, "sink");
- gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("audio", a_sink_pad));
+ gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("audio",
+ a_sink_pad));
gst_object_unref (GST_OBJECT (a_sink_pad));
}
@@ -390,23 +391,25 @@ gst_camera_capturer_create_encoder_bin (GstCameraCapturer *gcc)
}
static void
-gst_camera_capturer_create_remuxer_bin (GstCameraCapturer *gcc)
+gst_camera_capturer_create_remuxer_bin (GstCameraCapturer * gcc)
{
GstElement *muxer;
GstPad *v_sink_pad;
GST_INFO_OBJECT (gcc, "Creating remuxer bin");
gcc->priv->encoder_bin = gst_bin_new ("encoder_bin");
- muxer = gst_element_factory_make("qtmux", NULL);
- gcc->priv->filesink = gst_element_factory_make("filesink", NULL);
+ muxer = gst_element_factory_make ("qtmux", NULL);
+ gcc->priv->filesink = gst_element_factory_make ("filesink", NULL);
g_object_set (gcc->priv->filesink, "location", gcc->priv->output_file, NULL);
- gst_bin_add_many(GST_BIN(gcc->priv->encoder_bin), muxer, gcc->priv->filesink, NULL);
- gst_element_link(muxer, gcc->priv->filesink);
+ gst_bin_add_many (GST_BIN (gcc->priv->encoder_bin), muxer,
+ gcc->priv->filesink, NULL);
+ gst_element_link (muxer, gcc->priv->filesink);
/* Create ghost pads */
v_sink_pad = gst_element_get_request_pad (muxer, "video_%d");
- gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("video", v_sink_pad));
+ gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("video",
+ v_sink_pad));
gst_object_unref (v_sink_pad);
if (gcc->priv->audio_enabled) {
@@ -414,13 +417,14 @@ gst_camera_capturer_create_remuxer_bin (GstCameraCapturer *gcc)
/* Create ghost pads */
a_sink_pad = gst_element_get_request_pad (muxer, "audio_%d");
- gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("audio", a_sink_pad));
+ gst_element_add_pad (gcc->priv->encoder_bin, gst_ghost_pad_new ("audio",
+ a_sink_pad));
gst_object_unref (GST_OBJECT (v_sink_pad));
}
}
static GstElement *
-gst_camera_capturer_prepare_raw_source (GstCameraCapturer *gcc)
+gst_camera_capturer_prepare_raw_source (GstCameraCapturer * gcc)
{
GstElement *bin, *v_identity;
GstPad *video_pad, *src_pad;
@@ -443,13 +447,13 @@ gst_camera_capturer_prepare_raw_source (GstCameraCapturer *gcc)
gst_element_add_pad (bin, gst_ghost_pad_new ("sink", src_pad));
gst_object_unref (GST_OBJECT (src_pad));
- gst_camera_capturer_create_encoder_bin(gcc);
+ gst_camera_capturer_create_encoder_bin (gcc);
return bin;
}
static GstElement *
-gst_camera_capturer_prepare_uri_source (GstCameraCapturer *gcc)
+gst_camera_capturer_prepare_uri_source (GstCameraCapturer * gcc)
{
GstElement *bin, *decodebin, *identity;
GstPad *video_pad;
@@ -488,13 +492,13 @@ gst_camera_capturer_prepare_uri_source (GstCameraCapturer *gcc)
g_signal_connect (decodebin, "pad-added", G_CALLBACK (cb_new_pad), gcc);
- gst_camera_capturer_create_encoder_bin(gcc);
+ gst_camera_capturer_create_encoder_bin (gcc);
return bin;
}
static GstElement *
-gst_camera_capturer_prepare_dv_source (GstCameraCapturer *gcc)
+gst_camera_capturer_prepare_dv_source (GstCameraCapturer * gcc)
{
GstElement *bin, *decodebin, *colorspace, *deinterlacer;
GstPad *video_pad, *src_pad;
@@ -536,15 +540,15 @@ gst_camera_capturer_prepare_dv_source (GstCameraCapturer *gcc)
g_signal_connect (decodebin, "pad-added", G_CALLBACK (cb_new_pad), gcc);
- gst_camera_capturer_create_encoder_bin(gcc);
+ gst_camera_capturer_create_encoder_bin (gcc);
return bin;
}
static GstElement *
-gst_camera_capturer_prepare_mpegts_source (GstCameraCapturer *gcc)
+gst_camera_capturer_prepare_mpegts_source (GstCameraCapturer * gcc)
{
- GstElement *bin, *demuxer, *video, *video_parser;
+ GstElement *bin, *demuxer, *video, *video_parser;
GstPad *video_pad, *src_pad;
GST_INFO_OBJECT (gcc, "Creating mpegts source");
@@ -558,10 +562,12 @@ gst_camera_capturer_prepare_mpegts_source (GstCameraCapturer *gcc)
demuxer = gst_element_factory_make ("mpegtsdemux", NULL);
video_parser = gst_element_factory_make ("h264parse", "video-pad");
video = gst_element_factory_make ("capsfilter", NULL);
- g_object_set(video, "caps", gst_caps_from_string("video/x-h264, stream-format=avc, alignment=au"), NULL);
+ g_object_set (video, "caps",
+ gst_caps_from_string ("video/x-h264, stream-format=avc, alignment=au"),
+ NULL);
gst_bin_add_many (GST_BIN (bin), demuxer, video_parser, video, NULL);
- gst_element_link(video_parser, video);
+ gst_element_link (video_parser, video);
/* add ghostpad */
video_pad = gst_element_get_static_pad (video, "src");
@@ -587,14 +593,14 @@ gst_camera_capturer_prepare_mpegts_source (GstCameraCapturer *gcc)
g_signal_connect (demuxer, "pad-added", G_CALLBACK (cb_new_pad), gcc);
- gst_camera_capturer_create_remuxer_bin(gcc);
+ gst_camera_capturer_create_remuxer_bin (gcc);
return bin;
}
static gboolean
-gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
- GstBuffer *prev_buf, gboolean is_video)
+gst_camera_capturer_encoding_retimestamper (GstCameraCapturer * gcc,
+ GstBuffer * prev_buf, gboolean is_video)
{
GstClockTime buf_ts, new_buf_ts, duration;
GstBuffer *enc_buf;
@@ -603,22 +609,24 @@ gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
if (!gcc->priv->is_recording) {
/* Drop buffers if we are not recording */
- GST_LOG_OBJECT (gcc, "Dropping buffer on %s pad", is_video ? "video": "audio");
+ GST_LOG_OBJECT (gcc, "Dropping buffer on %s pad",
+ is_video ? "video" : "audio");
goto done;
}
/* If we are just remuxing, drop everything until we see a keyframe */
if (gcc->priv->video_needs_keyframe_sync && !gcc->priv->video_synced) {
- if (is_video && !GST_BUFFER_FLAG_IS_SET(prev_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ if (is_video
+ && !GST_BUFFER_FLAG_IS_SET (prev_buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
gcc->priv->video_synced = TRUE;
} else {
GST_LOG_OBJECT (gcc, "Waiting for a keyframe, "
- "dropping buffer on %s pad", is_video ? "video": "audio");
+ "dropping buffer on %s pad", is_video ? "video" : "audio");
goto done;
}
}
- enc_buf = gst_buffer_create_sub (prev_buf, 0, GST_BUFFER_SIZE(prev_buf));
+ enc_buf = gst_buffer_create_sub (prev_buf, 0, GST_BUFFER_SIZE (prev_buf));
buf_ts = GST_BUFFER_TIMESTAMP (prev_buf);
duration = GST_BUFFER_DURATION (prev_buf);
if (duration == GST_CLOCK_TIME_NONE)
@@ -626,11 +634,11 @@ gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
/* Check if it's the first buffer after starting or restarting the capture
* and update the timestamps accordingly */
- if (G_UNLIKELY(gcc->priv->current_recording_start_ts == GST_CLOCK_TIME_NONE)) {
+ if (G_UNLIKELY (gcc->priv->current_recording_start_ts == GST_CLOCK_TIME_NONE)) {
gcc->priv->current_recording_start_ts = buf_ts;
gcc->priv->last_accum_recorded_ts = gcc->priv->accum_recorded_ts;
GST_INFO_OBJECT (gcc, "Starting recording at %" GST_TIME_FORMAT,
- GST_TIME_ARGS(gcc->priv->last_accum_recorded_ts));
+ GST_TIME_ARGS (gcc->priv->last_accum_recorded_ts));
}
/* Clip buffers that are not in the segment */
@@ -642,7 +650,9 @@ gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
if (buf_ts != GST_CLOCK_TIME_NONE) {
/* Get the buffer timestamp with respect of the encoding time and not
* the playing time for a continous stream in the encoders input */
- new_buf_ts = buf_ts - gcc->priv->current_recording_start_ts + gcc->priv->last_accum_recorded_ts;
+ new_buf_ts =
+ buf_ts - gcc->priv->current_recording_start_ts +
+ gcc->priv->last_accum_recorded_ts;
/* Store the last timestamp seen on this pad */
if (is_video)
@@ -665,14 +675,15 @@ gst_camera_capturer_encoding_retimestamper (GstCameraCapturer *gcc,
GST_BUFFER_TIMESTAMP (enc_buf) = new_buf_ts;
- GST_LOG_OBJECT(gcc, "Pushing %s frame to the encoder in ts:% " GST_TIME_FORMAT
- " out ts: %" GST_TIME_FORMAT, is_video ? "video": "audio",
- GST_TIME_ARGS(buf_ts), GST_TIME_ARGS(new_buf_ts));
+ GST_LOG_OBJECT (gcc,
+ "Pushing %s frame to the encoder in ts:% " GST_TIME_FORMAT " out ts: %"
+ GST_TIME_FORMAT, is_video ? "video" : "audio", GST_TIME_ARGS (buf_ts),
+ GST_TIME_ARGS (new_buf_ts));
if (is_video)
- gst_app_src_push_buffer(GST_APP_SRC(gcc->priv->video_appsrc), enc_buf);
+ gst_app_src_push_buffer (GST_APP_SRC (gcc->priv->video_appsrc), enc_buf);
else
- gst_app_src_push_buffer(GST_APP_SRC(gcc->priv->audio_appsrc), enc_buf);
+ gst_app_src_push_buffer (GST_APP_SRC (gcc->priv->audio_appsrc), enc_buf);
done:
{
@@ -682,21 +693,21 @@ done:
}
static gboolean
-gst_camera_capturer_audio_encoding_probe (GstPad *pad, GstBuffer *buf,
- GstCameraCapturer *gcc)
+gst_camera_capturer_audio_encoding_probe (GstPad * pad, GstBuffer * buf,
+ GstCameraCapturer * gcc)
{
- return gst_camera_capturer_encoding_retimestamper(gcc, buf, FALSE);
+ return gst_camera_capturer_encoding_retimestamper (gcc, buf, FALSE);
}
static gboolean
-gst_camera_capturer_video_encoding_probe (GstPad *pad, GstBuffer *buf,
- GstCameraCapturer *gcc)
+gst_camera_capturer_video_encoding_probe (GstPad * pad, GstBuffer * buf,
+ GstCameraCapturer * gcc)
{
- return gst_camera_capturer_encoding_retimestamper(gcc, buf, TRUE);
+ return gst_camera_capturer_encoding_retimestamper (gcc, buf, TRUE);
}
static void
-gst_camera_capturer_create_decoder_bin (GstCameraCapturer *gcc)
+gst_camera_capturer_create_decoder_bin (GstCameraCapturer * gcc)
{
/* decoder --> video_preview_queue
* |
@@ -710,261 +721,281 @@ gst_camera_capturer_create_decoder_bin (GstCameraCapturer *gcc)
GstPad *v_dec_pad, *v_queue_pad, *v_prev_queue_pad;
GstPad *dec_sink_pad;
- GST_INFO_OBJECT(gcc, "Creating decoder bin");
+ GST_INFO_OBJECT (gcc, "Creating decoder bin");
decoder_bin = gcc->priv->source_decoder_bin;
/* Create elements */
- gcc->priv->decoder_bin = gst_bin_new("decoder_bin");
- v_queue = gst_element_factory_make("queue2", "video-queue");
- gcc->priv->video_appsrc = gst_element_factory_make("appsrc", "video-appsrc");
- v_prev_queue = gst_element_factory_make("queue2", "video-preview-queue");
+ gcc->priv->decoder_bin = gst_bin_new ("decoder_bin");
+ v_queue = gst_element_factory_make ("queue2", "video-queue");
+ gcc->priv->video_appsrc = gst_element_factory_make ("appsrc", "video-appsrc");
+ v_prev_queue = gst_element_factory_make ("queue2", "video-preview-queue");
- g_object_set(v_queue, "max-size-time", 1 * GST_SECOND, NULL);
- g_object_set(v_prev_queue, "max-size-bytes", 0, NULL);
+ g_object_set (v_queue, "max-size-time", 1 * GST_SECOND, NULL);
+ g_object_set (v_prev_queue, "max-size-bytes", 0, NULL);
- gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), decoder_bin, v_queue,
+ gst_bin_add_many (GST_BIN (gcc->priv->decoder_bin), decoder_bin, v_queue,
gcc->priv->video_appsrc, v_prev_queue, NULL);
/* link decoder to the preview-queue */
- v_dec_pad = gst_element_get_static_pad(decoder_bin, "video");
- v_prev_queue_pad = gst_element_get_static_pad(v_prev_queue, "sink");
- gst_pad_link(v_dec_pad, v_prev_queue_pad);
- gst_object_unref(v_dec_pad);
- gst_object_unref(v_prev_queue_pad);
+ v_dec_pad = gst_element_get_static_pad (decoder_bin, "video");
+ v_prev_queue_pad = gst_element_get_static_pad (v_prev_queue, "sink");
+ gst_pad_link (v_dec_pad, v_prev_queue_pad);
+ gst_object_unref (v_dec_pad);
+ gst_object_unref (v_prev_queue_pad);
/* Link appsrc */
gst_element_link (gcc->priv->video_appsrc, v_queue);
/* Create ghost pads */
- v_queue_pad = gst_element_get_static_pad(v_queue, "src");
- v_prev_queue_pad = gst_element_get_static_pad(v_prev_queue, "src");
- gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("video", v_queue_pad));
- gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("video_preview", v_prev_queue_pad));
- gst_object_unref(v_queue_pad);
- gst_object_unref(v_prev_queue_pad);
+ v_queue_pad = gst_element_get_static_pad (v_queue, "src");
+ v_prev_queue_pad = gst_element_get_static_pad (v_prev_queue, "src");
+ gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("video",
+ v_queue_pad));
+ gst_element_add_pad (gcc->priv->decoder_bin,
+ gst_ghost_pad_new ("video_preview", v_prev_queue_pad));
+ gst_object_unref (v_queue_pad);
+ gst_object_unref (v_prev_queue_pad);
/* Create the sink ghost pad, not needed for URI's */
- dec_sink_pad = gst_element_get_static_pad(decoder_bin, "sink");
+ dec_sink_pad = gst_element_get_static_pad (decoder_bin, "sink");
if (dec_sink_pad) {
- gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("sink", dec_sink_pad));
- gst_object_unref(dec_sink_pad);
+ gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("sink",
+ dec_sink_pad));
+ gst_object_unref (dec_sink_pad);
}
/* Add pad probes for the encoding branch */
- v_prev_queue_pad = gst_element_get_static_pad(v_prev_queue, "src");
- gst_pad_add_buffer_probe(v_prev_queue_pad, (GCallback) gst_camera_capturer_video_encoding_probe, gcc);
- gst_object_unref(v_prev_queue_pad);
+ v_prev_queue_pad = gst_element_get_static_pad (v_prev_queue, "src");
+ gst_pad_add_buffer_probe (v_prev_queue_pad,
+ (GCallback) gst_camera_capturer_video_encoding_probe, gcc);
+ gst_object_unref (v_prev_queue_pad);
if (gcc->priv->audio_enabled) {
GstElement *a_queue, *a_prev_queue;
GstPad *a_dec_pad, *a_queue_pad, *a_prev_queue_pad;
/* Create elements */
- gcc->priv->audio_appsrc = gst_element_factory_make("appsrc", "video-appsrc");
- a_queue = gst_element_factory_make("queue2", "audio-queue");
- a_prev_queue = gst_element_factory_make("queue2", "audio-preview-queue");
+ gcc->priv->audio_appsrc =
+ gst_element_factory_make ("appsrc", "video-appsrc");
+ a_queue = gst_element_factory_make ("queue2", "audio-queue");
+ a_prev_queue = gst_element_factory_make ("queue2", "audio-preview-queue");
- g_object_set(a_queue, "max-size-time", 1 * GST_SECOND, NULL);
+ g_object_set (a_queue, "max-size-time", 1 * GST_SECOND, NULL);
- gst_bin_add_many(GST_BIN(gcc->priv->decoder_bin), gcc->priv->audio_appsrc, a_queue,
- a_prev_queue, NULL);
+ gst_bin_add_many (GST_BIN (gcc->priv->decoder_bin), gcc->priv->audio_appsrc,
+ a_queue, a_prev_queue, NULL);
/* Link appsrc to the queue */
- gst_element_link(gcc->priv->audio_appsrc, a_queue);
+ gst_element_link (gcc->priv->audio_appsrc, a_queue);
/* link decoder to the queue */
- a_dec_pad = gst_element_get_static_pad(decoder_bin, "audio");
- a_prev_queue_pad = gst_element_get_static_pad(a_prev_queue, "sink");
- gst_pad_link(a_dec_pad, a_prev_queue_pad);
- gst_object_unref(a_dec_pad);
- gst_object_unref(a_prev_queue_pad);
+ a_dec_pad = gst_element_get_static_pad (decoder_bin, "audio");
+ a_prev_queue_pad = gst_element_get_static_pad (a_prev_queue, "sink");
+ gst_pad_link (a_dec_pad, a_prev_queue_pad);
+ gst_object_unref (a_dec_pad);
+ gst_object_unref (a_prev_queue_pad);
/* Create ghost pads */
- a_queue_pad = gst_element_get_static_pad(a_queue, "src");
- a_prev_queue_pad = gst_element_get_static_pad(a_prev_queue, "src");
- gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("audio", a_queue_pad));
- gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("audio_preview", a_prev_queue_pad));
- gst_object_unref(a_queue_pad);
- gst_object_unref(a_prev_queue_pad);
+ a_queue_pad = gst_element_get_static_pad (a_queue, "src");
+ a_prev_queue_pad = gst_element_get_static_pad (a_prev_queue, "src");
+ gst_element_add_pad (gcc->priv->decoder_bin, gst_ghost_pad_new ("audio",
+ a_queue_pad));
+ gst_element_add_pad (gcc->priv->decoder_bin,
+ gst_ghost_pad_new ("audio_preview", a_prev_queue_pad));
+ gst_object_unref (a_queue_pad);
+ gst_object_unref (a_prev_queue_pad);
/* Add pad probes for the encoding branch */
- a_prev_queue_pad = gst_element_get_static_pad(a_prev_queue, "src");
- gst_pad_add_buffer_probe(a_prev_queue_pad, (GCallback) gst_camera_capturer_audio_encoding_probe, gcc);
- gst_object_unref(a_prev_queue_pad);
+ a_prev_queue_pad = gst_element_get_static_pad (a_prev_queue, "src");
+ gst_pad_add_buffer_probe (a_prev_queue_pad,
+ (GCallback) gst_camera_capturer_audio_encoding_probe, gcc);
+ gst_object_unref (a_prev_queue_pad);
}
}
static void
-gst_camera_capturer_link_encoder_bin (GstCameraCapturer *gcc)
+gst_camera_capturer_link_encoder_bin (GstCameraCapturer * gcc)
{
GstPad *v_dec_pad, *v_enc_pad;
- GST_INFO_OBJECT(gcc, "Linking encoder bin");
+ GST_INFO_OBJECT (gcc, "Linking encoder bin");
- gst_bin_add(GST_BIN(gcc->priv->main_pipeline), gcc->priv->encoder_bin);
+ gst_bin_add (GST_BIN (gcc->priv->main_pipeline), gcc->priv->encoder_bin);
- v_dec_pad = gst_element_get_static_pad(gcc->priv->decoder_bin, "video");
- v_enc_pad = gst_element_get_static_pad(gcc->priv->encoder_bin, "video");
- gst_pad_link(v_dec_pad, v_enc_pad);
- gst_object_unref(v_dec_pad);
- gst_object_unref(v_enc_pad);
+ v_dec_pad = gst_element_get_static_pad (gcc->priv->decoder_bin, "video");
+ v_enc_pad = gst_element_get_static_pad (gcc->priv->encoder_bin, "video");
+ gst_pad_link (v_dec_pad, v_enc_pad);
+ gst_object_unref (v_dec_pad);
+ gst_object_unref (v_enc_pad);
if (gcc->priv->audio_enabled) {
GstPad *a_dec_pad, *a_enc_pad;
- a_dec_pad = gst_element_get_static_pad(gcc->priv->decoder_bin, "audio");
- a_enc_pad = gst_element_get_static_pad(gcc->priv->encoder_bin, "audio");
- gst_pad_link(a_dec_pad, a_enc_pad);
- gst_object_unref(a_dec_pad);
- gst_object_unref(a_enc_pad);
+ a_dec_pad = gst_element_get_static_pad (gcc->priv->decoder_bin, "audio");
+ a_enc_pad = gst_element_get_static_pad (gcc->priv->encoder_bin, "audio");
+ gst_pad_link (a_dec_pad, a_enc_pad);
+ gst_object_unref (a_dec_pad);
+ gst_object_unref (a_enc_pad);
}
- gst_element_set_state(gcc->priv->encoder_bin, GST_STATE_PLAYING);
+ gst_element_set_state (gcc->priv->encoder_bin, GST_STATE_PLAYING);
}
static void
-gst_camera_capturer_link_preview (GstCameraCapturer *gcc)
+gst_camera_capturer_link_preview (GstCameraCapturer * gcc)
{
GstPad *v_dec_prev_pad, *v_prev_pad;
- GST_INFO_OBJECT(gcc, "Linking preview bin");
+ GST_INFO_OBJECT (gcc, "Linking preview bin");
- gst_bin_add(GST_BIN(gcc->priv->main_pipeline), gcc->priv->decoder_bin);
+ gst_bin_add (GST_BIN (gcc->priv->main_pipeline), gcc->priv->decoder_bin);
if (gcc->priv->source_bin != NULL)
- gst_element_link(gcc->priv->source_bin, gcc->priv->decoder_bin);
+ gst_element_link (gcc->priv->source_bin, gcc->priv->decoder_bin);
- v_dec_prev_pad = gst_element_get_static_pad(gcc->priv->decoder_bin, "video_preview");
- v_prev_pad = gst_element_get_static_pad(gcc->priv->preview_bin, "video");
+ v_dec_prev_pad =
+ gst_element_get_static_pad (gcc->priv->decoder_bin, "video_preview");
+ v_prev_pad = gst_element_get_static_pad (gcc->priv->preview_bin, "video");
- gst_pad_link(v_dec_prev_pad, v_prev_pad);
+ gst_pad_link (v_dec_prev_pad, v_prev_pad);
- gst_object_unref(v_dec_prev_pad);
- gst_object_unref(v_prev_pad);
+ gst_object_unref (v_dec_prev_pad);
+ gst_object_unref (v_prev_pad);
if (gcc->priv->audio_enabled) {
GstPad *a_dec_prev_pad, *a_prev_pad;
- a_dec_prev_pad = gst_element_get_static_pad(gcc->priv->decoder_bin, "audio_preview");
- a_prev_pad = gst_element_get_static_pad(gcc->priv->preview_bin, "audio");
+ a_dec_prev_pad =
+ gst_element_get_static_pad (gcc->priv->decoder_bin, "audio_preview");
+ a_prev_pad = gst_element_get_static_pad (gcc->priv->preview_bin, "audio");
- gst_pad_link(a_dec_prev_pad, a_prev_pad);
+ gst_pad_link (a_dec_prev_pad, a_prev_pad);
- gst_object_unref(a_dec_prev_pad);
- gst_object_unref(a_prev_pad);
+ gst_object_unref (a_dec_prev_pad);
+ gst_object_unref (a_prev_pad);
}
- gst_element_set_state(gcc->priv->decoder_bin, GST_STATE_PLAYING);
+ gst_element_set_state (gcc->priv->decoder_bin, GST_STATE_PLAYING);
}
static gboolean
-cb_last_buffer (GstPad *pad, GstBuffer *buf, GstCameraCapturer *gcc){
+cb_last_buffer (GstPad * pad, GstBuffer * buf, GstCameraCapturer * gcc)
+{
if (buf != NULL) {
if (gcc->priv->last_buffer != NULL) {
- gst_buffer_unref(gcc->priv->last_buffer);
+ gst_buffer_unref (gcc->priv->last_buffer);
}
- gcc->priv->last_buffer = gst_buffer_ref(buf);
+ gcc->priv->last_buffer = gst_buffer_ref (buf);
}
return TRUE;
}
static void
-cb_new_prev_pad (GstElement * element, GstPad * pad, GstElement *bin)
+cb_new_prev_pad (GstElement * element, GstPad * pad, GstElement * bin)
{
GstPad *sink_pad;
- sink_pad = gst_element_get_static_pad(bin, "sink");
- gst_pad_link(pad, sink_pad);
- gst_object_unref(sink_pad);
+ sink_pad = gst_element_get_static_pad (bin, "sink");
+ gst_pad_link (pad, sink_pad);
+ gst_object_unref (sink_pad);
}
static void
-gst_camera_capturer_create_preview(GstCameraCapturer *gcc)
+gst_camera_capturer_create_preview (GstCameraCapturer * gcc)
{
GstElement *v_decoder, *video_bin;
GstPad *video_pad;
- v_decoder = gst_element_factory_make("decodebin2", "preview-decoder");
+ v_decoder = gst_element_factory_make ("decodebin2", "preview-decoder");
- video_bin = gst_parse_bin_from_description(
- "videoscale ! ffmpegcolorspace ! " DEFAULT_VIDEO_SINK " name=videosink sync=false", TRUE, NULL);
+ video_bin =
+ gst_parse_bin_from_description ("videoscale ! ffmpegcolorspace ! "
+ DEFAULT_VIDEO_SINK " name=videosink sync=false", TRUE, NULL);
- gcc->priv->preview_bin = gst_bin_new("preview_bin");
- gst_bin_add_many (GST_BIN(gcc->priv->preview_bin), v_decoder, video_bin, NULL);
+ gcc->priv->preview_bin = gst_bin_new ("preview_bin");
+ gst_bin_add_many (GST_BIN (gcc->priv->preview_bin), v_decoder, video_bin,
+ NULL);
- g_signal_connect (v_decoder, "pad-added", G_CALLBACK (cb_new_prev_pad), video_bin);
+ g_signal_connect (v_decoder, "pad-added", G_CALLBACK (cb_new_prev_pad),
+ video_bin);
- video_pad = gst_element_get_static_pad(video_bin, "sink");
+ video_pad = gst_element_get_static_pad (video_bin, "sink");
g_signal_connect (video_pad, "notify::caps",
G_CALLBACK (gcc_get_video_stream_info), gcc);
gst_pad_add_buffer_probe (video_pad, (GCallback) cb_last_buffer, gcc);
- gst_object_unref(video_pad);
+ gst_object_unref (video_pad);
/* Create ghost pads */
video_pad = gst_element_get_static_pad (v_decoder, "sink");
- gst_element_add_pad (gcc->priv->preview_bin, gst_ghost_pad_new ("video", video_pad));
+ gst_element_add_pad (gcc->priv->preview_bin, gst_ghost_pad_new ("video",
+ video_pad));
gst_object_unref (GST_OBJECT (video_pad));
if (gcc->priv->audio_enabled) {
GstElement *a_decoder, *audio_bin;
GstPad *audio_pad;
- a_decoder = gst_element_factory_make("decodebin2", NULL);
+ a_decoder = gst_element_factory_make ("decodebin2", NULL);
- audio_bin = gst_parse_bin_from_description(
- "audioconvert ! audioresample ! autoaudiosink name=audiosink", TRUE, NULL);
+ audio_bin =
+ gst_parse_bin_from_description
+ ("audioconvert ! audioresample ! autoaudiosink name=audiosink", TRUE,
+ NULL);
- gst_bin_add_many (GST_BIN(gcc->priv->preview_bin), a_decoder, audio_bin, NULL);
+ gst_bin_add_many (GST_BIN (gcc->priv->preview_bin), a_decoder, audio_bin,
+ NULL);
- g_signal_connect (a_decoder, "pad-added", G_CALLBACK (cb_new_prev_pad), audio_bin);
+ g_signal_connect (a_decoder, "pad-added", G_CALLBACK (cb_new_prev_pad),
+ audio_bin);
/* Create ghost pads */
audio_pad = gst_element_get_static_pad (a_decoder, "sink");
- gst_element_add_pad (gcc->priv->preview_bin, gst_ghost_pad_new ("audio", audio_pad));
+ gst_element_add_pad (gcc->priv->preview_bin, gst_ghost_pad_new ("audio",
+ audio_pad));
gst_object_unref (GST_OBJECT (audio_pad));
}
- gst_bin_add(GST_BIN(gcc->priv->main_pipeline), gcc->priv->preview_bin);
- gst_element_set_state(gcc->priv->preview_bin, GST_STATE_PLAYING);
+ gst_bin_add (GST_BIN (gcc->priv->main_pipeline), gcc->priv->preview_bin);
+ gst_element_set_state (gcc->priv->preview_bin, GST_STATE_PLAYING);
}
static void
-gst_camera_capturer_create_remainig (GstCameraCapturer *gcc)
+gst_camera_capturer_create_remainig (GstCameraCapturer * gcc)
{
- gst_camera_capturer_create_decoder_bin(gcc);
- gst_camera_capturer_create_preview(gcc);
+ gst_camera_capturer_create_decoder_bin (gcc);
+ gst_camera_capturer_create_preview (gcc);
- gst_camera_capturer_link_preview(gcc);
- gst_element_set_state(gcc->priv->main_pipeline, GST_STATE_PLAYING);
+ gst_camera_capturer_link_preview (gcc);
+ gst_element_set_state (gcc->priv->main_pipeline, GST_STATE_PLAYING);
}
static gboolean
-gst_camera_capturer_have_type_cb (GstElement *typefind, guint prob,
- GstCaps *caps, GstCameraCapturer *gcc)
+gst_camera_capturer_have_type_cb (GstElement * typefind, guint prob,
+ GstCaps * caps, GstCameraCapturer * gcc)
{
GstCaps *media_caps;
GstElement *decoder_bin = NULL;
- GST_INFO_OBJECT (gcc, "Found type with caps %s", gst_caps_to_string(caps));
+ GST_INFO_OBJECT (gcc, "Found type with caps %s", gst_caps_to_string (caps));
/* Check for DV streams */
- media_caps = gst_caps_from_string("video/x-dv, systemstream=true");
+ media_caps = gst_caps_from_string ("video/x-dv, systemstream=true");
- if (gst_caps_can_intersect(caps, media_caps)) {
- decoder_bin = gst_camera_capturer_prepare_dv_source(gcc);
- gst_caps_unref(media_caps);
+ if (gst_caps_can_intersect (caps, media_caps)) {
+ decoder_bin = gst_camera_capturer_prepare_dv_source (gcc);
+ gst_caps_unref (media_caps);
}
/* Check for MPEG-TS streams */
- media_caps = gst_caps_from_string("video/mpegts");
- if (gst_caps_can_intersect(caps, media_caps)) {
- decoder_bin = gst_camera_capturer_prepare_mpegts_source(gcc);
- gst_caps_unref(media_caps);
+ media_caps = gst_caps_from_string ("video/mpegts");
+ if (gst_caps_can_intersect (caps, media_caps)) {
+ decoder_bin = gst_camera_capturer_prepare_mpegts_source (gcc);
+ gst_caps_unref (media_caps);
}
/* Check for Raw streams */
media_caps = gst_caps_from_string ("video/x-raw-rgb; video/x-raw-yuv");
- if (gst_caps_can_intersect(caps, media_caps)) {
+ if (gst_caps_can_intersect (caps, media_caps)) {
gcc->priv->audio_enabled = FALSE;
- decoder_bin = gst_camera_capturer_prepare_raw_source(gcc);
- gst_caps_unref(media_caps);
+ decoder_bin = gst_camera_capturer_prepare_raw_source (gcc);
+ gst_caps_unref (media_caps);
}
if (decoder_bin != NULL) {
@@ -988,43 +1019,44 @@ gst_camera_capturer_create_video_source (GstCameraCapturer * gcc,
switch (type) {
case CAPTURE_SOURCE_TYPE_DV:
- GST_INFO_OBJECT(gcc, "Creating dv video source");
+ GST_INFO_OBJECT (gcc, "Creating dv video source");
break;
case CAPTURE_SOURCE_TYPE_SYSTEM:
- GST_INFO_OBJECT(gcc, "Creating system video source");
+ GST_INFO_OBJECT (gcc, "Creating system video source");
break;
case CAPTURE_SOURCE_TYPE_URI:
/* We don't use any source element for URI's, just a uridecodebin element
* which goes in the decoder bin */
- GST_INFO_OBJECT(gcc, "Skippinig creation of video source for URI");
+ GST_INFO_OBJECT (gcc, "Skippinig creation of video source for URI");
gcc->priv->source_bin = NULL;
gst_camera_capturer_prepare_uri_source (gcc);
gst_camera_capturer_create_remainig (gcc);
return TRUE;
case CAPTURE_SOURCE_TYPE_FILE:
- GST_INFO_OBJECT(gcc, "Creating file video source");
+ GST_INFO_OBJECT (gcc, "Creating file video source");
gcc->priv->source_element_name = g_strdup ("filesrc");
break;
default:
- g_assert_not_reached();
+ g_assert_not_reached ();
}
/* HACK: dshowvideosrc's device must be set before linking the element
* since the device is set in getcaps and can't be changed later */
if (!g_strcmp0 (gcc->priv->source_element_name, "dshowvideosrc")) {
- source_str = g_strdup_printf("%s device-name=\"%s\" name=source ! "
+ source_str = g_strdup_printf ("%s device-name=\"%s\" name=source ! "
"video/x-raw-yuv; video/x-raw-rgb; "
"video/x-dv, systemstream=(boolean)True "
"! typefind name=typefind", gcc->priv->source_element_name,
gcc->priv->device_id);
} else {
- source_str = g_strdup_printf("%s name=source %s ! typefind name=typefind",
+ source_str = g_strdup_printf ("%s name=source %s ! typefind name=typefind",
gcc->priv->source_element_name, filter);
}
- GST_INFO_OBJECT(gcc, "Created video source %s", source_str);
- gcc->priv->source_bin = gst_parse_bin_from_description(source_str, TRUE, NULL);
- g_free(source_str);
+ GST_INFO_OBJECT (gcc, "Created video source %s", source_str);
+ gcc->priv->source_bin =
+ gst_parse_bin_from_description (source_str, TRUE, NULL);
+ g_free (source_str);
if (!gcc->priv->source_bin) {
g_set_error (err,
GCC_ERROR,
@@ -1035,14 +1067,16 @@ gst_camera_capturer_create_video_source (GstCameraCapturer * gcc,
return FALSE;
}
- gcc->priv->source = gst_bin_get_by_name (GST_BIN(gcc->priv->source_bin), "source");
- typefind = gst_bin_get_by_name (GST_BIN(gcc->priv->source_bin), "typefind");
+ gcc->priv->source =
+ gst_bin_get_by_name (GST_BIN (gcc->priv->source_bin), "source");
+ typefind = gst_bin_get_by_name (GST_BIN (gcc->priv->source_bin), "typefind");
g_signal_connect (typefind, "have-type",
G_CALLBACK (gst_camera_capturer_have_type_cb), gcc);
- gst_camera_capturer_update_device_id(gcc);
+ gst_camera_capturer_update_device_id (gcc);
- GST_INFO_OBJECT(gcc, "Created video source %s", gcc->priv->source_element_name);
+ GST_INFO_OBJECT (gcc, "Created video source %s",
+ gcc->priv->source_element_name);
gst_object_unref (gcc->priv->source);
gst_object_unref (typefind);
@@ -1109,38 +1143,38 @@ gst_camera_capturer_create_video_muxer (GstCameraCapturer * gcc,
}
static void
-gst_camera_capturer_initialize (GstCameraCapturer *gcc)
+gst_camera_capturer_initialize (GstCameraCapturer * gcc)
{
- GError *err= NULL;
+ GError *err = NULL;
GST_INFO_OBJECT (gcc, "Initializing encoders");
- if (!gst_camera_capturer_create_video_encoder(gcc,
- gcc->priv->video_encoder_type, &err))
+ if (!gst_camera_capturer_create_video_encoder (gcc,
+ gcc->priv->video_encoder_type, &err))
goto missing_plugin;
- if (!gst_camera_capturer_create_audio_encoder(gcc,
- gcc->priv->audio_encoder_type, &err))
+ if (!gst_camera_capturer_create_audio_encoder (gcc,
+ gcc->priv->audio_encoder_type, &err))
goto missing_plugin;
- if (!gst_camera_capturer_create_video_muxer(gcc,
- gcc->priv->video_muxer_type, &err))
+ if (!gst_camera_capturer_create_video_muxer (gcc,
+ gcc->priv->video_muxer_type, &err))
goto missing_plugin;
GST_INFO_OBJECT (gcc, "Initializing source");
- if (!gst_camera_capturer_create_video_source(gcc,
- gcc->priv->source_type, &err))
+ if (!gst_camera_capturer_create_video_source (gcc,
+ gcc->priv->source_type, &err))
goto missing_plugin;
/* add the source element */
if (gcc->priv->source_bin)
- gst_bin_add(GST_BIN(gcc->priv->main_pipeline), gcc->priv->source_bin);
+ gst_bin_add (GST_BIN (gcc->priv->main_pipeline), gcc->priv->source_bin);
return;
missing_plugin:
- g_signal_emit (gcc, gcc_signals[SIGNAL_ERROR], 0, err->message);
- g_error_free (err);
+ g_signal_emit (gcc, gcc_signals[SIGNAL_ERROR], 0, err->message);
+ g_error_free (err);
}
static void
-gcc_encoder_send_event (GstCameraCapturer *gcc, GstEvent *event)
+gcc_encoder_send_event (GstCameraCapturer * gcc, GstEvent * event)
{
GstPad *video_pad, *audio_pad;
@@ -1148,15 +1182,15 @@ gcc_encoder_send_event (GstCameraCapturer *gcc, GstEvent *event)
return;
if (gcc->priv->audio_enabled) {
- gst_event_ref(event);
- audio_pad = gst_element_get_static_pad(gcc->priv->encoder_bin, "audio");
- gst_pad_send_event(audio_pad, event);
- gst_object_unref(audio_pad);
+ gst_event_ref (event);
+ audio_pad = gst_element_get_static_pad (gcc->priv->encoder_bin, "audio");
+ gst_pad_send_event (audio_pad, event);
+ gst_object_unref (audio_pad);
}
- video_pad = gst_element_get_static_pad(gcc->priv->encoder_bin, "video");
- gst_pad_send_event(video_pad, event);
- gst_object_unref(video_pad);
+ video_pad = gst_element_get_static_pad (gcc->priv->encoder_bin, "video");
+ gst_pad_send_event (video_pad, event);
+ gst_object_unref (video_pad);
}
static void
@@ -1289,13 +1323,14 @@ gcc_element_msg_sync (GstBus * bus, GstMessage * msg, gpointer data)
g_return_if_fail (gcc->priv->xoverlay != NULL);
g_return_if_fail (gcc->priv->window_handle != 0);
- g_object_set (GST_ELEMENT (gcc->priv->xoverlay), "force-aspect-ratio", TRUE, NULL);
+ g_object_set (GST_ELEMENT (gcc->priv->xoverlay), "force-aspect-ratio", TRUE,
+ NULL);
lgm_set_window_handle (gcc->priv->xoverlay, gcc->priv->window_handle);
}
}
static gboolean
-gcc_get_video_stream_info (GstPad *pad, GstPad *peer, GstCameraCapturer * gcc)
+gcc_get_video_stream_info (GstPad * pad, GstPad * peer, GstCameraCapturer * gcc)
{
GstStructure *s;
GstCaps *caps;
@@ -1388,13 +1423,13 @@ finish:
* ****************************************/
GList *
-gst_camera_capturer_enum_video_devices (const gchar *device)
+gst_camera_capturer_enum_video_devices (const gchar * device)
{
return gst_camera_capturer_enum_devices (device);
}
GList *
-gst_camera_capturer_enum_audio_devices (const gchar *device)
+gst_camera_capturer_enum_audio_devices (const gchar * device)
{
return gst_camera_capturer_enum_devices (device);
}
@@ -1429,9 +1464,10 @@ gst_camera_capturer_start (GstCameraCapturer * gcc)
g_return_if_fail (gcc != NULL);
g_return_if_fail (GST_IS_CAMERA_CAPTURER (gcc));
- GST_INFO_OBJECT(gcc, "Started capture");
+ GST_INFO_OBJECT (gcc, "Started capture");
g_mutex_lock (&gcc->priv->recording_lock);
- if (!gcc->priv->is_recording && gcc->priv->accum_recorded_ts == GST_CLOCK_TIME_NONE) {
+ if (!gcc->priv->is_recording
+ && gcc->priv->accum_recorded_ts == GST_CLOCK_TIME_NONE) {
gcc->priv->accum_recorded_ts = 0;
gcc->priv->is_recording = TRUE;
gst_camera_capturer_link_encoder_bin (gcc);
@@ -1455,7 +1491,8 @@ gst_camera_capturer_toggle_pause (GstCameraCapturer * gcc)
}
g_mutex_unlock (&gcc->priv->recording_lock);
- GST_INFO_OBJECT(gcc, "Capture state changed to %s", gcc->priv->is_recording ? "recording": "paused");
+ GST_INFO_OBJECT (gcc, "Capture state changed to %s",
+ gcc->priv->is_recording ? "recording" : "paused");
}
static void
@@ -1584,16 +1621,16 @@ gst_camera_capturer_stop (GstCameraCapturer * gcc)
//supports it. When a device is disconnected, the source is locked
//in ::create(), blocking the streaming thread. We need to change its
//state to null, this way camerabin doesn't block in ::do_stop().
- gst_element_set_state(gcc->priv->source, GST_STATE_NULL);
+ gst_element_set_state (gcc->priv->source, GST_STATE_NULL);
#endif
- GST_INFO_OBJECT(gcc, "Closing capture");
+ GST_INFO_OBJECT (gcc, "Closing capture");
g_mutex_lock (&gcc->priv->recording_lock);
gcc->priv->closing_recording = TRUE;
gcc->priv->is_recording = FALSE;
g_mutex_unlock (&gcc->priv->recording_lock);
- gcc_encoder_send_event(gcc, gst_event_new_eos());
+ gcc_encoder_send_event (gcc, gst_event_new_eos ());
}
void
@@ -1601,16 +1638,15 @@ gst_camera_capturer_expose (GstCameraCapturer * gcc)
{
g_return_if_fail (gcc != NULL);
- if (gcc->priv->xoverlay != NULL &&
- GST_IS_X_OVERLAY (gcc->priv->xoverlay)) {
+ if (gcc->priv->xoverlay != NULL && GST_IS_X_OVERLAY (gcc->priv->xoverlay)) {
gst_x_overlay_expose (gcc->priv->xoverlay);
}
}
void
-gst_camera_capturer_configure (GstCameraCapturer *gcc,
+gst_camera_capturer_configure (GstCameraCapturer * gcc,
const gchar * filename, CaptureSourceType source,
- const gchar *source_element, const gchar *device_id,
+ const gchar * source_element, const gchar * device_id,
VideoEncoderType video_encoder, AudioEncoderType audio_encoder,
VideoMuxerType muxer, guint video_bitrate, guint audio_bitrate,
guint record_audio, guint output_width, guint output_height,
diff --git a/libcesarplayer/gst-nle-source.c b/libcesarplayer/gst-nle-source.c
index 7baa0c5..0680428 100644
--- a/libcesarplayer/gst-nle-source.c
+++ b/libcesarplayer/gst-nle-source.c
@@ -287,8 +287,9 @@ gst_nle_source_push_buffer (GstNleSource * nlesrc, GstBuffer * buf,
if (buf_ts < item->start) {
GST_LOG_OBJECT (nlesrc, "Discard early %s buffer with ts: %"
- GST_TIME_FORMAT" start: %" GST_TIME_FORMAT, is_audio ? "audio" : "video",
- GST_TIME_ARGS (buf_ts), GST_TIME_ARGS (item->start));
+ GST_TIME_FORMAT " start: %" GST_TIME_FORMAT,
+ is_audio ? "audio" : "video", GST_TIME_ARGS (buf_ts),
+ GST_TIME_ARGS (item->start));
gst_buffer_unref (buf);
return GST_FLOW_OK;
}
@@ -336,7 +337,7 @@ gst_nle_source_push_buffer (GstNleSource * nlesrc, GstBuffer * buf,
}
static GstBuffer *
-gst_nle_source_audio_silence_buf (GstNleSource *nlesrc, guint64 start,
+gst_nle_source_audio_silence_buf (GstNleSource * nlesrc, guint64 start,
guint64 duration)
{
GstBuffer *buf;
@@ -375,17 +376,17 @@ gst_nle_source_no_more_pads (GstElement * element, GstNleSource * nlesrc)
duration = item->duration / item->rate;
/* Push the start buffer and last 2 ones and let audiorate fill the gap */
- buf = gst_nle_source_audio_silence_buf (nlesrc, item->start, 20 * GST_MSECOND);
+ buf =
+ gst_nle_source_audio_silence_buf (nlesrc, item->start,
+ 20 * GST_MSECOND);
gst_nle_source_push_buffer (nlesrc, buf, TRUE);
buf = gst_nle_source_audio_silence_buf (nlesrc,
- item->start + duration - 40 * GST_MSECOND,
- 20 * GST_MSECOND);
+ item->start + duration - 40 * GST_MSECOND, 20 * GST_MSECOND);
gst_nle_source_push_buffer (nlesrc, buf, TRUE);
buf = gst_nle_source_audio_silence_buf (nlesrc,
- item->start + duration - 20 * GST_MSECOND,
- 20 * GST_MSECOND);
+ item->start + duration - 20 * GST_MSECOND, 20 * GST_MSECOND);
gst_nle_source_push_buffer (nlesrc, buf, TRUE);
}
}
@@ -644,8 +645,7 @@ gst_nle_source_next (GstNleSource * nlesrc)
GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
GST_TIME_ARGS (nlesrc->start_ts));
gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
- ret = gst_element_get_state (nlesrc->decoder, &state,
- NULL, 5 * GST_SECOND);
+ ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 5 * GST_SECOND);
if (ret == GST_STATE_CHANGE_FAILURE) {
GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
gst_nle_source_check_eos (nlesrc);
@@ -694,7 +694,8 @@ gst_nle_source_change_state (GstElement * element, GstStateChange transition)
gst_element_set_state (nlesrc->decoder, GST_STATE_READY);
}
if (nlesrc->queue != NULL) {
- g_list_free_full (nlesrc->queue, (GDestroyNotify) gst_nle_source_item_free);
+ g_list_free_full (nlesrc->queue,
+ (GDestroyNotify) gst_nle_source_item_free);
nlesrc->queue = NULL;
}
break;
diff --git a/libcesarplayer/gst-remuxer.c b/libcesarplayer/gst-remuxer.c
index 193606f..5494010 100644
--- a/libcesarplayer/gst-remuxer.c
+++ b/libcesarplayer/gst-remuxer.c
@@ -83,8 +83,7 @@ gst_remuxer_init (GstRemuxer * object)
{
GstRemuxerPrivate *priv;
object->priv = priv =
- G_TYPE_INSTANCE_GET_PRIVATE (object, GST_TYPE_REMUXER,
- GstRemuxerPrivate);
+ G_TYPE_INSTANCE_GET_PRIVATE (object, GST_TYPE_REMUXER, GstRemuxerPrivate);
priv->input_file = NULL;
priv->output_file = NULL;
@@ -107,7 +106,8 @@ gst_remuxer_finalize (GObject * object)
gst_bus_set_flushing (remuxer->priv->bus, TRUE);
if (remuxer->priv->sig_bus_async)
- g_signal_handler_disconnect (remuxer->priv->bus, remuxer->priv->sig_bus_async);
+ g_signal_handler_disconnect (remuxer->priv->bus,
+ remuxer->priv->sig_bus_async);
gst_object_unref (remuxer->priv->bus);
remuxer->priv->bus = NULL;
@@ -125,8 +125,7 @@ gst_remuxer_finalize (GObject * object)
if (remuxer->priv->main_pipeline != NULL
&& GST_IS_ELEMENT (remuxer->priv->main_pipeline)) {
- gst_element_set_state (remuxer->priv->main_pipeline,
- GST_STATE_NULL);
+ gst_element_set_state (remuxer->priv->main_pipeline, GST_STATE_NULL);
gst_object_unref (remuxer->priv->main_pipeline);
remuxer->priv->main_pipeline = NULL;
}
@@ -149,18 +148,18 @@ gst_remuxer_class_init (GstRemuxerClass * klass)
/* Signals */
remuxer_signals[SIGNAL_ERROR] =
g_signal_new ("error",
- G_TYPE_FROM_CLASS (object_class),
- G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET (GstRemuxerClass, error),
- NULL, NULL,
- g_cclosure_marshal_VOID__STRING, G_TYPE_NONE, 1, G_TYPE_STRING);
+ G_TYPE_FROM_CLASS (object_class),
+ G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET (GstRemuxerClass, error),
+ NULL, NULL,
+ g_cclosure_marshal_VOID__STRING, G_TYPE_NONE, 1, G_TYPE_STRING);
remuxer_signals[SIGNAL_PERCENT] =
g_signal_new ("percent_completed",
- G_TYPE_FROM_CLASS (object_class),
- G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET (GstRemuxerClass, percent_completed),
- NULL, NULL, g_cclosure_marshal_VOID__FLOAT, G_TYPE_NONE, 1, G_TYPE_FLOAT);
+ G_TYPE_FROM_CLASS (object_class),
+ G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET (GstRemuxerClass, percent_completed),
+ NULL, NULL, g_cclosure_marshal_VOID__FLOAT, G_TYPE_NONE, 1, G_TYPE_FLOAT);
}
/***********************************
@@ -187,8 +186,7 @@ gst_remuxer_error_quark (void)
}
static GstElement *
-gst_remuxer_create_video_muxer (GstRemuxer * remuxer,
- VideoMuxerType type)
+gst_remuxer_create_video_muxer (GstRemuxer * remuxer, VideoMuxerType type)
{
GstElement *muxer;
@@ -218,7 +216,7 @@ gst_remuxer_create_video_muxer (GstRemuxer * remuxer,
}
static gboolean
-gst_remuxer_fix_video_ts (GstPad *pad, GstBuffer *buf, GstRemuxer *remuxer)
+gst_remuxer_fix_video_ts (GstPad * pad, GstBuffer * buf, GstRemuxer * remuxer)
{
if (GST_BUFFER_TIMESTAMP (buf) == GST_CLOCK_TIME_NONE) {
GST_BUFFER_TIMESTAMP (buf) = remuxer->priv->last_video_buf_ts;
@@ -229,8 +227,8 @@ gst_remuxer_fix_video_ts (GstPad *pad, GstBuffer *buf, GstRemuxer *remuxer)
}
static gboolean
-gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
- GstRemuxer *remuxer)
+gst_remuxer_pad_added_cb (GstElement * demuxer, GstPad * pad,
+ GstRemuxer * remuxer)
{
GstElement *muxer, *queue;
GstElement *parser = NULL;
@@ -251,11 +249,14 @@ gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
if (g_strrstr (mime, "video/x-h264")) {
GstPad *parser_pad;
- parser = gst_element_factory_make("h264parse", "video-parser");
- parser_caps = gst_caps_from_string("video/x-h264, stream-format=avc, alignment=au");
+ parser = gst_element_factory_make ("h264parse", "video-parser");
+ parser_caps =
+ gst_caps_from_string
+ ("video/x-h264, stream-format=avc, alignment=au");
parser_pad = gst_element_get_static_pad (parser, "src");
- gst_pad_add_buffer_probe (parser_pad, (GCallback)gst_remuxer_fix_video_ts, remuxer);
+ gst_pad_add_buffer_probe (parser_pad,
+ (GCallback) gst_remuxer_fix_video_ts, remuxer);
}
is_video = TRUE;
} else if (g_strrstr (mime, "audio") && !remuxer->priv->audio_linked) {
@@ -268,9 +269,9 @@ gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
//parser = gst_element_factory_make ("aacparse", NULL);
parser = gst_parse_bin_from_description ("faad ! faac", TRUE, NULL);
} else if (version == 3) {
- parser = gst_element_factory_make("mp3parse", "audio-parser");
+ parser = gst_element_factory_make ("mp3parse", "audio-parser");
} else {
- parser = gst_element_factory_make("mpegaudioparse", "audio-parser");
+ parser = gst_element_factory_make ("mpegaudioparse", "audio-parser");
}
} else if (g_strrstr (mime, "audio/x-eac3")) {
parser = gst_element_factory_make ("ac3parse", NULL);
@@ -285,9 +286,9 @@ gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
return TRUE;
}
- muxer = gst_bin_get_by_name (GST_BIN(remuxer->priv->main_pipeline), "muxer");
+ muxer = gst_bin_get_by_name (GST_BIN (remuxer->priv->main_pipeline), "muxer");
if (parser != NULL) {
- gst_bin_add (GST_BIN(remuxer->priv->main_pipeline), parser);
+ gst_bin_add (GST_BIN (remuxer->priv->main_pipeline), parser);
gst_element_set_state (parser, GST_STATE_PLAYING);
if (parser_caps) {
gst_element_link_filtered (parser, muxer, parser_caps);
@@ -316,7 +317,7 @@ gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
remuxer->priv->audio_linked = TRUE;
}
queue = gst_element_factory_make ("queue2", NULL);
- gst_bin_add (GST_BIN(remuxer->priv->main_pipeline), queue);
+ gst_bin_add (GST_BIN (remuxer->priv->main_pipeline), queue);
gst_element_set_state (queue, GST_STATE_PLAYING);
queue_sink_pad = gst_element_get_static_pad (queue, "sink");
queue_src_pad = gst_element_get_static_pad (queue, "src");
@@ -333,8 +334,8 @@ gst_remuxer_pad_added_cb (GstElement *demuxer, GstPad *pad,
}
static gboolean
-gst_remuxer_have_type_cb (GstElement *typefind, guint prob,
- GstCaps *caps, GstRemuxer *remuxer)
+gst_remuxer_have_type_cb (GstElement * typefind, guint prob,
+ GstCaps * caps, GstRemuxer * remuxer)
{
GstElement *demuxer = NULL;
GstElement *parser = NULL;
@@ -371,13 +372,13 @@ gst_remuxer_have_type_cb (GstElement *typefind, guint prob,
}
if (demuxer) {
- gst_bin_add (GST_BIN(remuxer->priv->main_pipeline), demuxer);
+ gst_bin_add (GST_BIN (remuxer->priv->main_pipeline), demuxer);
gst_element_link (typefind, demuxer);
g_signal_connect (demuxer, "pad-added",
G_CALLBACK (gst_remuxer_pad_added_cb), remuxer);
} else if (parser) {
GstPad *pad;
- gst_bin_add (GST_BIN(remuxer->priv->main_pipeline), parser);
+ gst_bin_add (GST_BIN (remuxer->priv->main_pipeline), parser);
gst_element_link (typefind, parser);
pad = gst_element_get_static_pad (parser, "src");
gst_remuxer_pad_added_cb (parser, pad, remuxer);
@@ -394,7 +395,7 @@ gst_remuxer_have_type_cb (GstElement *typefind, guint prob,
}
static void
-gst_remuxer_initialize (GstRemuxer *remuxer)
+gst_remuxer_initialize (GstRemuxer * remuxer)
{
GstElement *filesrc, *typefind, *muxer, *filesink;
@@ -403,20 +404,21 @@ gst_remuxer_initialize (GstRemuxer *remuxer)
/* Create elements */
remuxer->priv->main_pipeline = gst_pipeline_new ("pipeline");
- filesrc = gst_element_factory_make("filesrc", "source");
- typefind = gst_element_factory_make("typefind", "typefind");
- muxer = gst_remuxer_create_video_muxer (remuxer, remuxer->priv->video_muxer_type);
- filesink = gst_element_factory_make("filesink", "sink");
+ filesrc = gst_element_factory_make ("filesrc", "source");
+ typefind = gst_element_factory_make ("typefind", "typefind");
+ muxer =
+ gst_remuxer_create_video_muxer (remuxer, remuxer->priv->video_muxer_type);
+ filesink = gst_element_factory_make ("filesink", "sink");
/* Set properties */
g_object_set (filesrc, "location", remuxer->priv->input_file, NULL);
g_object_set (filesink, "location", remuxer->priv->output_file, NULL);
/* Add elements to the bin */
- gst_bin_add_many(GST_BIN(remuxer->priv->main_pipeline), filesrc, typefind,
+ gst_bin_add_many (GST_BIN (remuxer->priv->main_pipeline), filesrc, typefind,
muxer, filesink, NULL);
- gst_element_link(filesrc, typefind);
- gst_element_link(muxer, filesink);
+ gst_element_link (filesrc, typefind);
+ gst_element_link (muxer, filesink);
g_signal_connect (typefind, "have-type",
G_CALLBACK (gst_remuxer_have_type_cb), remuxer);
@@ -514,7 +516,7 @@ gst_remuxer_cancel (GstRemuxer * remuxer)
}
GstRemuxer *
-gst_remuxer_new (gchar * input_file, gchar *output_file,
+gst_remuxer_new (gchar * input_file, gchar * output_file,
VideoMuxerType muxer, GError ** err)
{
GstRemuxer *remuxer = NULL;
diff --git a/libcesarplayer/gst-video-encoder.c b/libcesarplayer/gst-video-encoder.c
index 43d5b45..80b7fe8 100644
--- a/libcesarplayer/gst-video-encoder.c
+++ b/libcesarplayer/gst-video-encoder.c
@@ -84,7 +84,7 @@ static int gve_signals[LAST_SIGNAL] = { 0 };
static void gve_error_msg (GstVideoEncoder * gcc, GstMessage * msg);
static void gve_bus_message_cb (GstBus * bus, GstMessage * message,
gpointer data);
-static gboolean gst_video_encoder_select_next_file (GstVideoEncoder *gve);
+static gboolean gst_video_encoder_select_next_file (GstVideoEncoder * gve);
G_DEFINE_TYPE (GstVideoEncoder, gst_video_encoder, G_TYPE_OBJECT);
@@ -209,14 +209,14 @@ gst_video_encoder_error_quark (void)
}
static gboolean
-gve_on_buffer_cb (GstPad *pad, GstBuffer *buf, GstVideoEncoder *gve)
+gve_on_buffer_cb (GstPad * pad, GstBuffer * buf, GstVideoEncoder * gve)
{
gve->priv->last_buf_ts = g_get_monotonic_time ();
return TRUE;
}
static void
-gst_video_encoder_create_encoder_bin (GstVideoEncoder *gve)
+gst_video_encoder_create_encoder_bin (GstVideoEncoder * gve)
{
GstElement *colorspace1, *videoscale, *framerate, *deinterlace;
GstElement *colorspace2, *audioconvert, *audioresample;
@@ -228,14 +228,14 @@ gst_video_encoder_create_encoder_bin (GstVideoEncoder *gve)
GST_INFO_OBJECT (gve, "Creating encoder bin");
gve->priv->encoder_bin = gst_bin_new ("encoder_bin");
- colorspace1 = gst_element_factory_make("ffmpegcolorspace", NULL);
- deinterlace = gst_element_factory_make("ffdeinterlace", NULL);
- colorspace2 = gst_element_factory_make("ffmpegcolorspace", "colorspace2");
- videoscale = gst_element_factory_make("videoscale", "gve_videoscale");
- framerate = gst_element_factory_make("videorate", "gve_videorate");
- audioconvert = gst_element_factory_make("audioconvert", NULL);
- audioresample = gst_element_factory_make("audioresample", NULL);
- gve->priv->filesink = gst_element_factory_make("filesink", NULL);
+ colorspace1 = gst_element_factory_make ("ffmpegcolorspace", NULL);
+ deinterlace = gst_element_factory_make ("ffdeinterlace", NULL);
+ colorspace2 = gst_element_factory_make ("ffmpegcolorspace", "colorspace2");
+ videoscale = gst_element_factory_make ("videoscale", "gve_videoscale");
+ framerate = gst_element_factory_make ("videorate", "gve_videorate");
+ audioconvert = gst_element_factory_make ("audioconvert", NULL);
+ audioresample = gst_element_factory_make ("audioresample", NULL);
+ gve->priv->filesink = gst_element_factory_make ("filesink", NULL);
aqueue = gst_element_factory_make ("queue2", "audio_queue");
vqueue = gst_element_factory_make ("queue2", "video_queue");
a_identity = gst_element_factory_make ("identity", "audio_identity");
@@ -258,22 +258,22 @@ gst_video_encoder_create_encoder_bin (GstVideoEncoder *gve)
gst_caps_set_simple (video_caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
1, 1, NULL);
if (gve->priv->output_width != 0) {
- gst_caps_set_simple (video_caps, "width", G_TYPE_INT, gve->priv->output_width,
- NULL);
+ gst_caps_set_simple (video_caps, "width", G_TYPE_INT,
+ gve->priv->output_width, NULL);
}
if (gve->priv->output_height != 0) {
- gst_caps_set_simple (video_caps, "height", G_TYPE_INT, gve->priv->output_height,
- NULL);
+ gst_caps_set_simple (video_caps, "height", G_TYPE_INT,
+ gve->priv->output_height, NULL);
}
/* Set caps for the encoding framerate */
if (gve->priv->fps_n != 0 && gve->priv->fps_d != 0) {
- gst_caps_set_simple (video_caps, "framerate", GST_TYPE_FRACTION,
- gve->priv->fps_n, gve->priv->fps_d, NULL);
+ gst_caps_set_simple (video_caps, "framerate", GST_TYPE_FRACTION,
+ gve->priv->fps_n, gve->priv->fps_d, NULL);
}
/* Audio caps to fixate the channels and sample rate */
- audio_caps = gst_caps_from_string (
- "audio/x-raw-int, channels=(int)2, rate=(int)48000;"
+ audio_caps =
+ gst_caps_from_string ("audio/x-raw-int, channels=(int)2, rate=(int)48000;"
"audio/x-raw-float, channels=(int)2, rate=(int)48000");
/* Set caps for the h264 profile */
@@ -284,23 +284,24 @@ gst_video_encoder_create_encoder_bin (GstVideoEncoder *gve)
g_object_set (a_identity, "single-segment", TRUE, NULL);
g_object_set (v_identity, "single-segment", TRUE, NULL);
- gst_bin_add_many(GST_BIN(gve->priv->encoder_bin), v_identity, colorspace1,
+ gst_bin_add_many (GST_BIN (gve->priv->encoder_bin), v_identity, colorspace1,
deinterlace, videoscale, framerate, colorspace2,
gve->priv->video_enc, vqueue, gve->priv->muxer, gve->priv->filesink,
- a_identity, audioconvert, audioresample, gve->priv->audio_enc, aqueue, NULL);
+ a_identity, audioconvert, audioresample, gve->priv->audio_enc, aqueue,
+ NULL);
- gst_element_link_many(v_identity, colorspace1, deinterlace, framerate,
+ gst_element_link_many (v_identity, colorspace1, deinterlace, framerate,
videoscale, colorspace2, NULL);
gst_element_link_filtered (colorspace2, gve->priv->video_enc, video_caps);
gst_element_link_filtered (gve->priv->video_enc, vqueue, h264_caps);
gst_element_link (vqueue, gve->priv->muxer);
- gst_element_link_many(a_identity, audioconvert, audioresample, NULL);
+ gst_element_link_many (a_identity, audioconvert, audioresample, NULL);
gst_element_link_filtered (audioresample, gve->priv->audio_enc, audio_caps);
gst_element_link_many (gve->priv->audio_enc, aqueue, gve->priv->muxer, NULL);
- gst_element_link(gve->priv->muxer, gve->priv->filesink);
+ gst_element_link (gve->priv->muxer, gve->priv->filesink);
- gst_caps_unref(video_caps);
- gst_caps_unref(audio_caps);
+ gst_caps_unref (video_caps);
+ gst_caps_unref (audio_caps);
gst_caps_unref (h264_caps);
g_object_set (gve->priv->filesink, "location", gve->priv->output_file, NULL);
@@ -325,7 +326,7 @@ gst_video_encoder_create_encoder_bin (GstVideoEncoder *gve)
}
static gboolean
-cb_handle_eos (GstPad *pad, GstEvent *event, GstVideoEncoder *gve)
+cb_handle_eos (GstPad * pad, GstEvent * event, GstVideoEncoder * gve)
{
if (event->type == GST_EVENT_EOS) {
GST_DEBUG_OBJECT (gve, "Dropping EOS on pad %s:%s",
@@ -336,7 +337,7 @@ cb_handle_eos (GstPad *pad, GstEvent *event, GstVideoEncoder *gve)
gve->priv->video_drained = TRUE;
}
if (gve->priv->audio_drained && gve->priv->video_drained) {
- g_idle_add ((GSourceFunc)gst_video_encoder_select_next_file, gve);
+ g_idle_add ((GSourceFunc) gst_video_encoder_select_next_file, gve);
}
return FALSE;
}
@@ -344,7 +345,7 @@ cb_handle_eos (GstPad *pad, GstEvent *event, GstVideoEncoder *gve)
}
static void
-cb_new_pad (GstElement *decodebin, GstPad *pad, GstVideoEncoder *gve)
+cb_new_pad (GstElement * decodebin, GstPad * pad, GstVideoEncoder * gve)
{
GstPad *epad = NULL;
GstCaps *caps;
@@ -383,27 +384,28 @@ cb_new_pad (GstElement *decodebin, GstPad *pad, GstVideoEncoder *gve)
}
static void
-gst_video_encoder_create_source (GstVideoEncoder *gve, gchar *location)
+gst_video_encoder_create_source (GstVideoEncoder * gve, gchar * location)
{
GST_INFO_OBJECT (gve, "Creating source");
if (gve->priv->source_bin != NULL) {
gst_element_set_state (gve->priv->source_bin, GST_STATE_NULL);
- gst_bin_remove (GST_BIN(gve->priv->main_pipeline), gve->priv->source_bin);
+ gst_bin_remove (GST_BIN (gve->priv->main_pipeline), gve->priv->source_bin);
}
gve->priv->source_bin = gst_element_factory_make ("uridecodebin", NULL);
g_object_set (gve->priv->source_bin, "uri", location, NULL);
- g_signal_connect (gve->priv->source_bin, "pad-added", G_CALLBACK (cb_new_pad), gve);
+ g_signal_connect (gve->priv->source_bin, "pad-added", G_CALLBACK (cb_new_pad),
+ gve);
g_signal_connect (gve->priv->source_bin, "autoplug-select",
G_CALLBACK (lgm_filter_video_decoders), gve);
- gst_bin_add (GST_BIN(gve->priv->main_pipeline), gve->priv->source_bin);
+ gst_bin_add (GST_BIN (gve->priv->main_pipeline), gve->priv->source_bin);
gst_element_sync_state_with_parent (gve->priv->source_bin);
gve->priv->audio_drained = FALSE;
gve->priv->video_drained = FALSE;
}
static gboolean
-gst_video_encoder_select_next_file (GstVideoEncoder *gve)
+gst_video_encoder_select_next_file (GstVideoEncoder * gve)
{
GstPad *audio_pad, *video_pad;
@@ -432,7 +434,8 @@ gst_video_encoder_select_next_file (GstVideoEncoder *gve)
gst_pad_unlink (v_peer, video_pad);
gst_object_unref (v_peer);
}
- gst_video_encoder_create_source (gve, (gchar *) gve->priv->current_file->data);
+ gst_video_encoder_create_source (gve,
+ (gchar *) gve->priv->current_file->data);
} else {
GST_INFO_OBJECT (gve, "No more files, sending EOS");
if (gve->priv->update_id != 0) {
@@ -444,8 +447,8 @@ gst_video_encoder_select_next_file (GstVideoEncoder *gve)
"max-size-bytes", 0, "max-size-buffers", 0, NULL);
g_object_set (gve->priv->vqueue, "max-size-time", 0,
"max-size-bytes", 0, "max-size-buffers", 0, NULL);
- gst_pad_send_event (audio_pad, gst_event_new_eos());
- gst_pad_send_event (video_pad, gst_event_new_eos());
+ gst_pad_send_event (audio_pad, gst_event_new_eos ());
+ gst_pad_send_event (video_pad, gst_event_new_eos ());
}
return FALSE;
}
@@ -509,19 +512,19 @@ gst_video_encoder_create_video_muxer (GstVideoEncoder * gve,
}
static void
-gst_video_encoder_initialize (GstVideoEncoder *gve)
+gst_video_encoder_initialize (GstVideoEncoder * gve)
{
- GError *err= NULL;
+ GError *err = NULL;
GST_INFO_OBJECT (gve, "Initializing encoders");
- if (!gst_video_encoder_create_video_encoder(gve,
- gve->priv->video_encoder_type, &err))
+ if (!gst_video_encoder_create_video_encoder (gve,
+ gve->priv->video_encoder_type, &err))
goto missing_plugin;
- if (!gst_video_encoder_create_audio_encoder(gve,
- gve->priv->audio_encoder_type, &err))
+ if (!gst_video_encoder_create_audio_encoder (gve,
+ gve->priv->audio_encoder_type, &err))
goto missing_plugin;
- if (!gst_video_encoder_create_video_muxer(gve,
- gve->priv->video_muxer_type, &err))
+ if (!gst_video_encoder_create_video_muxer (gve,
+ gve->priv->video_muxer_type, &err))
goto missing_plugin;
gst_video_encoder_create_encoder_bin (gve);
@@ -530,8 +533,8 @@ gst_video_encoder_initialize (GstVideoEncoder *gve)
return;
missing_plugin:
- g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, err->message);
- g_error_free (err);
+ g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, err->message);
+ g_error_free (err);
}
static void
@@ -614,7 +617,7 @@ gst_video_encoder_query_timeout (GstVideoEncoder * gve)
MIN (0.99, (gfloat) pos / (gfloat) gve->priv->total_duration));
if (g_get_monotonic_time () - gve->priv->last_buf_ts > 4 * 1000000) {
- g_idle_add ((GSourceFunc)gst_video_encoder_select_next_file, gve);
+ g_idle_add ((GSourceFunc) gst_video_encoder_select_next_file, gve);
}
return TRUE;
@@ -632,11 +635,11 @@ gst_video_encoder_cancel (GstVideoEncoder * gve)
g_return_if_fail (gve != NULL);
g_return_if_fail (GST_IS_VIDEO_ENCODER (gve));
- g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) -1);
+ g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) - 1);
gst_element_set_state (gve->priv->main_pipeline, GST_STATE_NULL);
gst_element_get_state (gve->priv->main_pipeline, NULL, NULL, -1);
- gst_bin_remove (GST_BIN(gve->priv->main_pipeline), gve->priv->source_bin);
- gst_bin_remove (GST_BIN(gve->priv->main_pipeline), gve->priv->encoder_bin);
+ gst_bin_remove (GST_BIN (gve->priv->main_pipeline), gve->priv->source_bin);
+ gst_bin_remove (GST_BIN (gve->priv->main_pipeline), gve->priv->encoder_bin);
gve->priv->total_duration = 0;
if (gve->priv->update_id != 0) {
g_source_remove (gve->priv->update_id);
@@ -650,7 +653,7 @@ gst_video_encoder_start (GstVideoEncoder * gve)
g_return_if_fail (gve != NULL);
g_return_if_fail (GST_IS_VIDEO_ENCODER (gve));
- GST_INFO_OBJECT(gve, "Starting encoding");
+ GST_INFO_OBJECT (gve, "Starting encoding");
g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) 0);
gst_video_encoder_initialize (gve);
gve->priv->last_buf_ts = g_get_monotonic_time ();
@@ -660,16 +663,17 @@ gst_video_encoder_start (GstVideoEncoder * gve)
}
void
-gst_video_encoder_add_file (GstVideoEncoder * gve, const gchar *file, guint64 duration)
+gst_video_encoder_add_file (GstVideoEncoder * gve, const gchar * file,
+ guint64 duration)
{
gchar *uri;
g_return_if_fail (gve != NULL);
g_return_if_fail (GST_IS_VIDEO_ENCODER (gve));
- GST_INFO_OBJECT(gve, "Adding file %s", file);
+ GST_INFO_OBJECT (gve, "Adding file %s", file);
uri = lgm_filename_to_uri (file);
if (uri == NULL) {
- GST_ERROR_OBJECT(gve, "Invalid filename %s", file);
+ GST_ERROR_OBJECT (gve, "Invalid filename %s", file);
}
gve->priv->input_files = g_list_append (gve->priv->input_files, uri);
gve->priv->total_duration += duration * GST_MSECOND;
@@ -700,6 +704,7 @@ gst_video_encoder_set_encoding_format (GstVideoEncoder * gve,
gve->priv->fps_d = fps_d;
}
+
GstVideoEncoder *
gst_video_encoder_new (gchar * filename, GError ** err)
{
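The gst-video-encoder.c hunks above are whitespace-only, but the code they reflow follows the standard GStreamer construction pattern: create elements with gst_element_factory_make, add them all to a bin, then link them, optionally through filtered caps. A minimal, self-contained sketch of that pattern against the GStreamer 0.10 API this tree targets (element names and caps values are illustrative, not taken from the patch):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *bin, *src, *convert, *enc, *sink;
  GstCaps *caps;

  gst_init (&argc, &argv);

  bin = gst_pipeline_new ("sketch");
  src = gst_element_factory_make ("videotestsrc", NULL);
  convert = gst_element_factory_make ("ffmpegcolorspace", NULL);
  enc = gst_element_factory_make ("theoraenc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);

  /* Add everything to the bin before linking */
  gst_bin_add_many (GST_BIN (bin), src, convert, enc, sink, NULL);

  /* Fixate the format negotiated between convert and enc, the same way
   * the encoder bin above pins width/height/framerate */
  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, 320, "height", G_TYPE_INT, 240, NULL);
  gst_element_link (src, convert);
  gst_element_link_filtered (convert, enc, caps);
  gst_element_link (enc, sink);
  gst_caps_unref (caps);

  gst_element_set_state (bin, GST_STATE_PLAYING);
  /* ... run a main loop here, then shut down ... */
  gst_element_set_state (bin, GST_STATE_NULL);
  gst_object_unref (bin);

  return 0;
}

The same make / add / link / link_filtered sequence is what gst_video_encoder_create_encoder_bin does, just with the project's real encoder, muxer and queue elements.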
diff --git a/libcesarplayer/lgm-video-player.c b/libcesarplayer/lgm-video-player.c
index 14f166d..6e8a7fb 100644
--- a/libcesarplayer/lgm-video-player.c
+++ b/libcesarplayer/lgm-video-player.c
@@ -56,17 +56,18 @@ enum
LAST_SIGNAL
};
-typedef enum {
- GST_PLAY_FLAG_VIDEO = (1 << 0),
- GST_PLAY_FLAG_AUDIO = (1 << 1),
- GST_PLAY_FLAG_TEXT = (1 << 2),
- GST_PLAY_FLAG_VIS = (1 << 3),
- GST_PLAY_FLAG_SOFT_VOLUME = (1 << 4),
- GST_PLAY_FLAG_NATIVE_AUDIO = (1 << 5),
- GST_PLAY_FLAG_NATIVE_VIDEO = (1 << 6),
- GST_PLAY_FLAG_DOWNLOAD = (1 << 7),
- GST_PLAY_FLAG_BUFFERING = (1 << 8),
- GST_PLAY_FLAG_DEINTERLACE = (1 << 9),
+typedef enum
+{
+ GST_PLAY_FLAG_VIDEO = (1 << 0),
+ GST_PLAY_FLAG_AUDIO = (1 << 1),
+ GST_PLAY_FLAG_TEXT = (1 << 2),
+ GST_PLAY_FLAG_VIS = (1 << 3),
+ GST_PLAY_FLAG_SOFT_VOLUME = (1 << 4),
+ GST_PLAY_FLAG_NATIVE_AUDIO = (1 << 5),
+ GST_PLAY_FLAG_NATIVE_VIDEO = (1 << 6),
+ GST_PLAY_FLAG_DOWNLOAD = (1 << 7),
+ GST_PLAY_FLAG_BUFFERING = (1 << 8),
+ GST_PLAY_FLAG_DEINTERLACE = (1 << 9),
GST_PLAY_FLAG_SOFT_COLORBALANCE = (1 << 10)
} GstPlayFlags;
@@ -102,8 +103,7 @@ struct LgmVideoPlayerPrivate
static void lgm_video_player_finalize (GObject * object);
static gboolean lgm_query_timeout (LgmVideoPlayer * lvp);
-static GError *lgm_error_from_gst_error (LgmVideoPlayer * lvp,
- GstMessage * m);
+static GError *lgm_error_from_gst_error (LgmVideoPlayer * lvp, GstMessage * m);
static GstElementClass *parent_class = NULL;
@@ -166,8 +166,9 @@ lgm_element_msg_sync_cb (GstBus * bus, GstMessage * msg, gpointer data)
if (lvp->priv->xoverlay != NULL) {
gst_object_unref (lvp->priv->xoverlay);
}
- lvp->priv->xoverlay = (GstXOverlay *) gst_object_ref (GST_X_OVERLAY (sender));
- lgm_set_window_handle(lvp->priv->xoverlay, lvp->priv->window_handle);
+ lvp->priv->xoverlay =
+ (GstXOverlay *) gst_object_ref (GST_X_OVERLAY (sender));
+ lgm_set_window_handle (lvp->priv->xoverlay, lvp->priv->window_handle);
g_mutex_unlock (&lvp->priv->overlay_lock);
}
}
@@ -197,7 +198,7 @@ lgm_bus_message_cb (GstBus * bus, GstMessage * message, gpointer data)
gst_element_set_state (lvp->priv->play, GST_STATE_NULL);
g_signal_emit (lvp, lgm_signals[SIGNAL_ERROR], 0,
- error->message, TRUE, FALSE);
+ error->message, TRUE, FALSE);
g_error_free (error);
break;
}
@@ -297,8 +298,7 @@ lgm_query_timeout (LgmVideoPlayer * lvp)
}
static void
-lgm_parse_stream_caps (GstPad *pad, GstPad *peer,
- LgmVideoPlayer *lvp)
+lgm_parse_stream_caps (GstPad * pad, GstPad * peer, LgmVideoPlayer * lvp)
{
GstCaps *caps;
GstStructure *s;
@@ -342,19 +342,20 @@ lgm_error_from_gst_error (LgmVideoPlayer * lvp, GstMessage * err_msg)
gst_message_parse_error (err_msg, &e, NULL);
if (is_error (e, RESOURCE, NOT_FOUND) || is_error (e, RESOURCE, OPEN_READ)) {
- if (e->code == GST_RESOURCE_ERROR_NOT_FOUND) {
- ret = g_error_new_literal (LGM_ERROR, GST_ERROR_FILE_NOT_FOUND,
- _("Location not found."));
- } else {
- ret = g_error_new_literal (LGM_ERROR, GST_ERROR_FILE_PERMISSION,
- _("Could not open location; "
- "you might not have permission to open the file."));
- }
+ if (e->code == GST_RESOURCE_ERROR_NOT_FOUND) {
+ ret = g_error_new_literal (LGM_ERROR, GST_ERROR_FILE_NOT_FOUND,
+ _("Location not found."));
+ } else {
+ ret = g_error_new_literal (LGM_ERROR, GST_ERROR_FILE_PERMISSION,
+ _("Could not open location; "
+ "you might not have permission to open the file."));
+ }
} else if (e->domain == GST_RESOURCE_ERROR) {
ret = g_error_new_literal (LGM_ERROR, GST_ERROR_FILE_GENERIC, e->message);
} else if (is_error (e, CORE, MISSING_PLUGIN) ||
is_error (e, STREAM, CODEC_NOT_FOUND)) {
- gchar *msg = "The playback of this movie requires a plugin which is not installed.";
+ gchar *msg =
+ "The playback of this movie requires a plugin which is not installed.";
ret = g_error_new_literal (LGM_ERROR, GST_ERROR_CODEC_NOT_HANDLED, msg);
} else if (is_error (e, STREAM, WRONG_TYPE) ||
is_error (e, STREAM, NOT_IMPLEMENTED)) {
@@ -383,7 +384,9 @@ poll_for_state_change_full (LgmVideoPlayer * lvp, GstElement * element,
GstMessageType events;
bus = gst_element_get_bus (element);
- events = (GstMessageType) (GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
+ events =
+ (GstMessageType) (GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR |
+ GST_MESSAGE_EOS);
while (TRUE) {
GstMessage *message;
@@ -458,8 +461,7 @@ error:
}
gboolean
-lgm_video_player_open (LgmVideoPlayer * lvp,
- const gchar * uri, GError ** error)
+lgm_video_player_open (LgmVideoPlayer * lvp, const gchar * uri, GError ** error)
{
GstMessage *err_msg = NULL;
gboolean ret;
@@ -798,8 +800,7 @@ lgm_video_player_expose (LgmVideoPlayer * lvp)
g_return_if_fail (lvp != NULL);
g_mutex_lock (&lvp->priv->overlay_lock);
- if (lvp->priv->xoverlay != NULL &&
- GST_IS_X_OVERLAY (lvp->priv->xoverlay)) {
+ if (lvp->priv->xoverlay != NULL && GST_IS_X_OVERLAY (lvp->priv->xoverlay)) {
gst_x_overlay_expose (lvp->priv->xoverlay);
}
g_mutex_unlock (&lvp->priv->overlay_lock);
@@ -893,13 +894,14 @@ lgm_video_player_get_current_frame (LgmVideoPlayer * lvp)
return pixbuf;
}
-void lgm_video_player_set_window_handle (LgmVideoPlayer *lvp,
+void
+lgm_video_player_set_window_handle (LgmVideoPlayer * lvp,
guintptr window_handle)
{
g_mutex_lock (&lvp->priv->overlay_lock);
lvp->priv->window_handle = window_handle;
if (lvp->priv->xoverlay != NULL) {
- lgm_set_window_handle(lvp->priv->xoverlay, lvp->priv->window_handle);
+ lgm_set_window_handle (lvp->priv->xoverlay, lvp->priv->window_handle);
}
g_mutex_unlock (&lvp->priv->overlay_lock);
}
@@ -956,7 +958,7 @@ lgm_video_player_new (LgmUseType type, GError ** err)
g_object_set (audio_sink, "sync", TRUE, NULL);
}
- if (!video_sink || !audio_sink) {
+ if (!video_sink || !audio_sink) {
g_set_error (err, LGM_ERROR, GST_ERROR_VIDEO_PLUGIN,
_("No valid sink found."));
goto sink_error;
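The lgm-video-player.c changes are likewise formatting-only; the error path they touch (lgm_bus_message_cb and lgm_error_from_gst_error) follows the usual GstBus idiom of parsing a GST_MESSAGE_ERROR into a GError and branching on its domain and code. A hedged, minimal sketch of that idiom (the handler name and the domain check are illustrative only):

#include <gst/gst.h>

/* Minimal bus watch that extracts the GError from an error message,
 * similar in spirit to lgm_bus_message_cb / lgm_error_from_gst_error */
static gboolean
bus_message_cb (GstBus * bus, GstMessage * message, gpointer data)
{
  GMainLoop *loop = data;

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *error = NULL;
      gchar *debug = NULL;

      gst_message_parse_error (message, &error, &debug);
      g_printerr ("Error from %s: %s\n",
          GST_OBJECT_NAME (message->src), error->message);
      if (error->domain == GST_RESOURCE_ERROR)
        g_printerr ("  (resource error, code %d)\n", error->code);
      g_error_free (error);
      g_free (debug);
      g_main_loop_quit (loop);
      break;
    }
    case GST_MESSAGE_EOS:
      g_main_loop_quit (loop);
      break;
    default:
      break;
  }
  return TRUE;
}

Such a handler would typically be attached with gst_bus_add_watch (bus, bus_message_cb, loop).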
diff --git a/libcesarplayer/main.c b/libcesarplayer/main.c
index ff51aab..f0e0035 100644
--- a/libcesarplayer/main.c
+++ b/libcesarplayer/main.c
@@ -91,7 +91,7 @@ main (int argc, char *argv[])
gtk_widget_show (GTK_WIDGET (gvc));
gtk_widget_show (window);
- gst_camera_capturer_run(gvc);
+ gst_camera_capturer_run (gvc);
gtk_main ();
return 0;
diff --git a/libcesarplayer/test-capturer.c b/libcesarplayer/test-capturer.c
index c341b3a..e26974c 100644
--- a/libcesarplayer/test-capturer.c
+++ b/libcesarplayer/test-capturer.c
@@ -35,19 +35,19 @@ static int sargc;
static char **sargv;
static void
-rec_clicked_cb (GtkButton *b, GstCameraCapturer *gcc)
+rec_clicked_cb (GtkButton * b, GstCameraCapturer * gcc)
{
gst_camera_capturer_start (gcc);
}
static void
-stop_clicked (GtkButton *b, GstCameraCapturer *gcc)
+stop_clicked (GtkButton * b, GstCameraCapturer * gcc)
{
gst_camera_capturer_stop (gcc);
}
static void
-on_realized_cb (GtkWidget *video)
+on_realized_cb (GtkWidget * video)
{
GstCameraCapturer *gvc;
guintptr window;
@@ -57,14 +57,15 @@ on_realized_cb (GtkWidget *video)
gvc = gst_camera_capturer_new (&error);
gst_camera_capturer_configure (gvc, sargv[1], CAPTURE_SOURCE_TYPE_SYSTEM,
- sargv[2], sargv[3], VIDEO_ENCODER_H264, AUDIO_ENCODER_AAC, VIDEO_MUXER_MP4,
- 1000, 100, FALSE, 320, 240, window);
- gst_camera_capturer_run(gvc);
+ sargv[2], sargv[3], VIDEO_ENCODER_H264, AUDIO_ENCODER_AAC,
+ VIDEO_MUXER_MP4, 1000, 100, FALSE, 320, 240, window);
+ gst_camera_capturer_run (gvc);
g_signal_connect (G_OBJECT (recbutton), "clicked",
G_CALLBACK (rec_clicked_cb), gvc);
g_signal_connect (G_OBJECT (stopbutton), "clicked",
G_CALLBACK (stop_clicked), gvc);
}
+
void
create_window (void)
{
@@ -81,10 +82,10 @@ create_window (void)
GTK_WIDGET_UNSET_FLAGS (video, GTK_DOUBLE_BUFFERED);
gtk_container_add (GTK_CONTAINER (window), GTK_WIDGET (vbox));
- gtk_box_pack_start (GTK_BOX(vbox), GTK_WIDGET (video), TRUE, TRUE, 0);
- gtk_box_pack_start (GTK_BOX(vbox), GTK_WIDGET (hbox), FALSE, FALSE, 0);
- gtk_box_pack_start(GTK_BOX(hbox), recbutton, TRUE, TRUE, 0);
- gtk_box_pack_start(GTK_BOX(hbox), stopbutton, TRUE, TRUE, 0);
+ gtk_box_pack_start (GTK_BOX (vbox), GTK_WIDGET (video), TRUE, TRUE, 0);
+ gtk_box_pack_start (GTK_BOX (vbox), GTK_WIDGET (hbox), FALSE, FALSE, 0);
+ gtk_box_pack_start (GTK_BOX (hbox), recbutton, TRUE, TRUE, 0);
+ gtk_box_pack_start (GTK_BOX (hbox), stopbutton, TRUE, TRUE, 0);
g_signal_connect (video, "realize", G_CALLBACK (on_realized_cb), NULL);
gtk_widget_show_all (window);
}
@@ -95,7 +96,7 @@ int
main (int argc, char **argv)
{
if (argc != 4) {
- g_print("Usage: test-encoder output_file device_type device-id\n");
+ g_print ("Usage: test-encoder output_file device_type device-id\n");
return 1;
}
gtk_init (&argc, &argv);
diff --git a/libcesarplayer/test-discoverer.c b/libcesarplayer/test-discoverer.c
index 9abc335..013a701 100644
--- a/libcesarplayer/test-discoverer.c
+++ b/libcesarplayer/test-discoverer.c
@@ -31,7 +31,7 @@ main (int argc, char *argv[])
lgm_init_backend (0, NULL);
if (argc != 2) {
- g_print("Usage: test-discoverer file_uri\n");
+ g_print ("Usage: test-discoverer file_uri\n");
return 1;
}
diff --git a/libcesarplayer/test-editor.c b/libcesarplayer/test-editor.c
index fd0c026..7e48377 100644
--- a/libcesarplayer/test-editor.c
+++ b/libcesarplayer/test-editor.c
@@ -29,23 +29,24 @@
static GMainLoop *loop;
static gboolean
-percent_done_cb (GstVideoEditor *remuxer, gfloat percent, GstVideoEditor *editor)
+percent_done_cb (GstVideoEditor * remuxer, gfloat percent,
+ GstVideoEditor * editor)
{
if (percent == 1) {
- g_print("SUCESS!\n");
+ g_print ("SUCESS!\n");
g_main_loop_quit (loop);
} else {
- g_print("----> %f%%\n", percent);
+ g_print ("----> %f%%\n", percent);
}
return TRUE;
}
static gboolean
-error_cb (GstVideoEditor *remuxer, gchar *error, GstVideoEditor *editor)
+error_cb (GstVideoEditor * remuxer, gchar * error, GstVideoEditor * editor)
{
- g_print("ERROR: %s\n", error);
- g_main_loop_quit (loop);
- return FALSE;
+ g_print ("ERROR: %s\n", error);
+ g_main_loop_quit (loop);
+ return FALSE;
}
int
@@ -64,7 +65,8 @@ main (int argc, char *argv[])
gst_video_editor_init_backend (&argc, &argv);
if (argc < 9) {
- g_print("Usage: test-remuxer output_file format bitrate with_audio with_title input_file start stop\n");
+ g_print
+ ("Usage: test-remuxer output_file format bitrate with_audio with_title input_file start stop\n");
return 1;
}
output_file = argv[1];
@@ -73,11 +75,11 @@ main (int argc, char *argv[])
with_audio = (gboolean) g_strtod (argv[4], NULL);
with_title = (gboolean) g_strtod (argv[5], NULL);
- if (!g_strcmp0(format, "mp4")) {
+ if (!g_strcmp0 (format, "mp4")) {
video_encoder = VIDEO_ENCODER_H264;
video_muxer = VIDEO_MUXER_MP4;
audio_encoder = AUDIO_ENCODER_AAC;
- } else if (!g_strcmp0(format, "avi")) {
+ } else if (!g_strcmp0 (format, "avi")) {
video_encoder = VIDEO_ENCODER_MPEG4;
video_muxer = VIDEO_MUXER_AVI;
audio_encoder = AUDIO_ENCODER_MP3;
@@ -88,28 +90,29 @@ main (int argc, char *argv[])
editor = gst_video_editor_new (NULL);
gst_video_editor_set_encoding_format (editor, output_file, video_encoder,
- audio_encoder, video_muxer, bitrate, 128, 1280, 720, 25, 1, with_audio, with_title);
+ audio_encoder, video_muxer, bitrate, 128, 1280, 720, 25, 1, with_audio,
+ with_title);
- for (i=8; i<=argc; i=i+3) {
+ for (i = 8; i <= argc; i = i + 3) {
gchar *title;
- title = g_strdup_printf ("Title %d", i-4);
+ title = g_strdup_printf ("Title %d", i - 4);
- input_file = argv[i-2];
- start = (guint64) g_strtod (argv[i-1], NULL);
+ input_file = argv[i - 2];
+ start = (guint64) g_strtod (argv[i - 1], NULL);
stop = (guint64) g_strtod (argv[i], NULL);
if (g_str_has_suffix (input_file, ".png")) {
gst_video_editor_add_image_segment (editor, input_file, start,
- stop-start, title);
+ stop - start, title);
} else {
- gst_video_editor_add_segment (editor, input_file, start, stop-start,
- (gfloat) 1, title, TRUE);
+ gst_video_editor_add_segment (editor, input_file, start, stop - start,
+ (gfloat) 1, title, TRUE);
}
g_free (title);
}
loop = g_main_loop_new (NULL, FALSE);
g_signal_connect (editor, "error", G_CALLBACK (error_cb), editor);
- g_signal_connect (editor, "percent_completed", G_CALLBACK(percent_done_cb),
+ g_signal_connect (editor, "percent_completed", G_CALLBACK (percent_done_cb),
editor);
gst_video_editor_start (editor);
g_main_loop_run (loop);
@@ -121,4 +124,3 @@ error:
return 1;
}
-
diff --git a/libcesarplayer/test-remuxer.c b/libcesarplayer/test-remuxer.c
index e5a3ba6..164e1f8 100644
--- a/libcesarplayer/test-remuxer.c
+++ b/libcesarplayer/test-remuxer.c
@@ -26,23 +26,23 @@
#include "gst-remuxer.h"
static gboolean
-percent_done_cb (GstRemuxer *remuxer, gfloat percent, GMainLoop *loop)
+percent_done_cb (GstRemuxer * remuxer, gfloat percent, GMainLoop * loop)
{
if (percent == 1) {
- g_print("SUCESS!\n");
+ g_print ("SUCESS!\n");
g_main_loop_quit (loop);
} else {
- g_print("----> %f%%", percent);
+ g_print ("----> %f%%", percent);
}
return TRUE;
}
static gboolean
-error_cb (GstRemuxer *remuxer, gchar *error, GMainLoop *loop)
+error_cb (GstRemuxer * remuxer, gchar * error, GMainLoop * loop)
{
- g_print("ERROR: %s\n", error);
- g_main_loop_quit (loop);
- return TRUE;
+ g_print ("ERROR: %s\n", error);
+ g_main_loop_quit (loop);
+ return TRUE;
}
int
@@ -54,7 +54,7 @@ main (int argc, char *argv[])
gst_remuxer_init_backend (&argc, &argv);
if (argc != 3) {
- g_print("Usage: test-remuxer input_file output_file\n");
+ g_print ("Usage: test-remuxer input_file output_file\n");
return 1;
}
remuxer = gst_remuxer_new (argv[1], argv[2], VIDEO_MUXER_MP4, NULL);
@@ -63,8 +63,7 @@ main (int argc, char *argv[])
g_signal_connect (remuxer, "percent_completed",
G_CALLBACK (percent_done_cb), loop);
- g_signal_connect (remuxer, "error",
- G_CALLBACK (error_cb), loop);
+ g_signal_connect (remuxer, "error", G_CALLBACK (error_cb), loop);
gst_remuxer_start (remuxer);
g_main_loop_run (loop);
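The video-utils.m hunks below are the same purely mechanical reflow. Across the whole patch the target style is the one gst-indent (GStreamer's GNU indent wrapper) emits: two-space indentation, a space before the opening parenthesis of calls and declarations, pointers written as "Type * name" in prototypes, and long argument lists wrapped and re-aligned. A small hypothetical helper, shown only to illustrate that style (it is not part of the patch):

/* Hypothetical helper, formatted the way gst-indent lays out this tree */
static GstElement *
make_queue (GstBin * bin, const gchar * name)
{
  GstElement *queue;

  queue = gst_element_factory_make ("queue2", name);
  g_object_set (queue, "max-size-buffers", 0, "max-size-bytes", 0, NULL);
  gst_bin_add (GST_BIN (bin), queue);

  return queue;
}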
diff --git a/libcesarplayer/video-utils.m b/libcesarplayer/video-utils.m
index 7edcd5a..0cb71db 100644
--- a/libcesarplayer/video-utils.m
+++ b/libcesarplayer/video-utils.m
@@ -41,18 +41,18 @@
#endif
GstAutoplugSelectResult
-lgm_filter_video_decoders (GstElement* object, GstPad* arg0,
- GstCaps* arg1, GstElementFactory* arg2, gpointer user_data)
+lgm_filter_video_decoders (GstElement * object, GstPad * arg0,
+ GstCaps * arg1, GstElementFactory * arg2, gpointer user_data)
{
const gchar *name = gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (arg2));
- if (!g_strcmp0(name, "fluvadec")) {
+ if (!g_strcmp0 (name, "fluvadec")) {
return GST_AUTOPLUG_SELECT_SKIP;
}
return GST_AUTOPLUG_SELECT_TRY;
}
guintptr
-lgm_get_window_handle(GdkWindow *window)
+lgm_get_window_handle (GdkWindow * window)
{
guintptr window_handle;
@@ -60,7 +60,7 @@ lgm_get_window_handle(GdkWindow *window)
/* Retrieve window handler from GDK */
#if defined (GDK_WINDOWING_WIN32)
- window_handle = (guintptr)GDK_WINDOW_HWND (window);
+ window_handle = (guintptr) GDK_WINDOW_HWND (window);
#elif defined (GDK_WINDOWING_QUARTZ)
window_handle = (guintptr) gdk_quartz_window_get_nsview (window);
#elif defined (GDK_WINDOWING_X11)
@@ -71,7 +71,7 @@ lgm_get_window_handle(GdkWindow *window)
}
void
-lgm_set_window_handle(GstXOverlay *xoverlay, guintptr window_handle)
+lgm_set_window_handle (GstXOverlay * xoverlay, guintptr window_handle)
{
gst_x_overlay_set_window_handle (xoverlay, window_handle);
}
@@ -79,17 +79,17 @@ lgm_set_window_handle(GstXOverlay *xoverlay, guintptr window_handle)
void
lgm_init_backend (int argc, char **argv)
{
- gst_init(&argc, &argv);
+ gst_init (&argc, &argv);
}
gchar *
-lgm_filename_to_uri (const gchar *filename)
+lgm_filename_to_uri (const gchar * filename)
{
gchar *uri, *path;
GError *err = NULL;
#ifdef G_OS_WIN32
- if (g_path_is_absolute(filename) || !gst_uri_is_valid (filename)) {
+ if (g_path_is_absolute (filename) || !gst_uri_is_valid (filename)) {
#else
if (!gst_uri_is_valid (filename)) {
#endif
@@ -118,11 +118,10 @@ lgm_filename_to_uri (const gchar *filename)
}
GstDiscovererResult
-lgm_discover_uri (
- const gchar *filename, guint64 *duration, guint *width,
- guint *height, guint *fps_n, guint *fps_d, guint *par_n, guint *par_d,
- gchar **container, gchar **video_codec, gchar **audio_codec,
- GError **err)
+lgm_discover_uri (const gchar * filename, guint64 * duration, guint * width,
+ guint * height, guint * fps_n, guint * fps_d, guint * par_n, guint * par_d,
+ gchar ** container, gchar ** video_codec, gchar ** audio_codec,
+ GError ** err)
{
GstDiscoverer *discoverer;
GstDiscovererInfo *info;
@@ -163,8 +162,9 @@ lgm_discover_uri (
if (GST_IS_DISCOVERER_CONTAINER_INFO (sinfo)) {
GstCaps *caps;
- caps = gst_discoverer_stream_info_get_caps (
- GST_DISCOVERER_STREAM_INFO(sinfo));
+ caps =
+ gst_discoverer_stream_info_get_caps (GST_DISCOVERER_STREAM_INFO
+ (sinfo));
*container = gst_pb_utils_get_codec_description (caps);
gst_caps_unref (caps);
}
@@ -181,8 +181,9 @@ lgm_discover_uri (
if (ainfo != NULL) {
GstCaps *caps;
- caps = gst_discoverer_stream_info_get_caps (
- GST_DISCOVERER_STREAM_INFO (ainfo));
+ caps =
+ gst_discoverer_stream_info_get_caps (GST_DISCOVERER_STREAM_INFO
+ (ainfo));
*audio_codec = gst_pb_utils_get_codec_description (caps);
gst_caps_unref (caps);
}
@@ -202,8 +203,9 @@ lgm_discover_uri (
if (vinfo != NULL) {
GstCaps *caps;
- caps = gst_discoverer_stream_info_get_caps (
- GST_DISCOVERER_STREAM_INFO (vinfo));
+ caps =
+ gst_discoverer_stream_info_get_caps (GST_DISCOVERER_STREAM_INFO
+ (vinfo));
*video_codec = gst_pb_utils_get_codec_description (caps);
gst_caps_unref (caps);
*height = gst_discoverer_video_info_get_height (vinfo);
@@ -224,18 +226,18 @@ lgm_discover_uri (
return ret;
}
-GstElement * lgm_create_video_encoder (VideoEncoderType type, guint quality,
+GstElement *
+lgm_create_video_encoder (VideoEncoderType type, guint quality,
GQuark quark, GError ** err)
{
- GstElement * encoder = NULL;
+ GstElement *encoder = NULL;
gchar *name = NULL;
switch (type) {
case VIDEO_ENCODER_MPEG4:
encoder = gst_element_factory_make ("ffenc_mpeg4", "video-encoder");
g_object_set (encoder, "pass", 512,
- "max-key-interval", -1,
- "bitrate", quality * 1000 , NULL);
+ "max-key-interval", -1, "bitrate", quality * 1000, NULL);
name = "FFmpeg mpeg4 video encoder";
break;
@@ -247,27 +249,28 @@ GstElement * lgm_create_video_encoder (VideoEncoderType type, guint quality,
name = "Xvid video encoder";
break;
- case VIDEO_ENCODER_H264: {
+ case VIDEO_ENCODER_H264:{
GstElement *parse;
- gchar *stats_file = g_build_path (G_DIR_SEPARATOR_S, g_get_tmp_dir(),
+ gchar *stats_file = g_build_path (G_DIR_SEPARATOR_S, g_get_tmp_dir (),
"x264.log", NULL);
encoder = gst_element_factory_make ("fluvah264enc", "video-encoder");
parse = gst_element_factory_make ("h264parse", NULL);
if (!encoder || !parse) {
- if (encoder) gst_object_unref (encoder);
- if (parse) gst_object_unref (parse);
- encoder = gst_element_factory_make ("x264enc", "video-encoder");
- g_object_set (encoder, "key-int-max", 25, "pass", 17,
- "speed-preset", 3, "stats-file", stats_file,
- "bitrate", quality, NULL);
- name = "X264 video encoder";
- }
- else {
+ if (encoder)
+ gst_object_unref (encoder);
+ if (parse)
+ gst_object_unref (parse);
+ encoder = gst_element_factory_make ("x264enc", "video-encoder");
+ g_object_set (encoder, "key-int-max", 25, "pass", 17,
+ "speed-preset", 3, "stats-file", stats_file,
+ "bitrate", quality, NULL);
+ name = "X264 video encoder";
+ } else {
GstPad *encoder_sink_pad, *parse_src_pad;
GstElement *bin = gst_bin_new (NULL);
g_object_set (encoder, "bitrate", quality, "keyframe-period", 1000,
- "rate-control", 1, "entropy-mode", 1, NULL);
+ "rate-control", 1, "entropy-mode", 1, NULL);
gst_bin_add_many (GST_BIN (bin), encoder, parse, NULL);
gst_element_link (encoder, parse);
@@ -275,16 +278,14 @@ GstElement * lgm_create_video_encoder (VideoEncoderType type, guint quality,
encoder_sink_pad = gst_element_get_static_pad (encoder, "sink");
parse_src_pad = gst_element_get_static_pad (parse, "src");
- gst_element_add_pad (bin,
- gst_ghost_pad_new ("sink", encoder_sink_pad));
- gst_element_add_pad (bin,
- gst_ghost_pad_new ("src", parse_src_pad));
-
+ gst_element_add_pad (bin, gst_ghost_pad_new ("sink", encoder_sink_pad));
+ gst_element_add_pad (bin, gst_ghost_pad_new ("src", parse_src_pad));
+
gst_object_unref (encoder_sink_pad);
gst_object_unref (parse_src_pad);
encoder = bin;
-
+
name = "Fluendo H264 video encoder";
}
g_free (stats_file);
@@ -294,8 +295,7 @@ GstElement * lgm_create_video_encoder (VideoEncoderType type, guint quality,
case VIDEO_ENCODER_THEORA:
encoder = gst_element_factory_make ("theoraenc", "video-encoder");
g_object_set (encoder, "keyframe-auto", FALSE,
- "keyframe-force", 25,
- "bitrate", quality, NULL);
+ "keyframe-force", 25, "bitrate", quality, NULL);
name = "Theora video encoder";
break;
@@ -321,7 +321,8 @@ GstElement * lgm_create_video_encoder (VideoEncoderType type, guint quality,
return encoder;
}
-GstElement * lgm_create_audio_encoder (AudioEncoderType type, guint quality,
+GstElement *
+lgm_create_audio_encoder (AudioEncoderType type, guint quality,
GQuark quark, GError ** err)
{
GstElement *encoder = NULL;
@@ -330,7 +331,7 @@ GstElement * lgm_create_audio_encoder (AudioEncoderType type, guint quality,
switch (type) {
case AUDIO_ENCODER_MP3:
encoder = gst_element_factory_make ("lamemp3enc", "audio-encoder");
- g_object_set (encoder, "target", 0, "quality", (gfloat)4, NULL);
+ g_object_set (encoder, "target", 0, "quality", (gfloat) 4, NULL);
name = "Mp3 audio encoder";
break;
@@ -360,7 +361,8 @@ GstElement * lgm_create_audio_encoder (AudioEncoderType type, guint quality,
return encoder;
}
-GstElement * lgm_create_muxer (VideoMuxerType type, GQuark quark, GError **err)
+GstElement *
+lgm_create_muxer (VideoMuxerType type, GQuark quark, GError ** err)
{
GstElement *muxer = NULL;
gchar *name = NULL;