[libgrss] Added function feed_channel_fetch_async()
- From: Roberto Guido <rguido src gnome org>
- To: commits-list gnome org
- Cc:
- Subject: [libgrss] Added function feed_channel_fetch_async()
- Date: Thu, 29 Apr 2010 02:40:55 +0000 (UTC)
commit eb39b35d44897647261c270945375f686c6c0aff
Author: Roberto Guido <bob4mail gmail com>
Date: Thu Apr 29 04:41:58 2010 +0200
Added function feed_channel_fetch_async()
NEWS | 4 +
doc/reference/libgrss-sections.txt | 1 +
src/feed-channel.c | 107 +++++++++++++++++++++++++++---------
src/feed-channel.h | 1 +
src/feeds-group.c | 1 +
5 files changed, 88 insertions(+), 26 deletions(-)
---
diff --git a/NEWS b/NEWS
index ad322b0..50521d1 100644
--- a/NEWS
+++ b/NEWS
@@ -1,3 +1,7 @@
+libgrss 0.5 (UNRELEASED)
+==============================================================================
+- Added function feed_channel_fetch_async()
+
libgrss 0.4
==============================================================================
- Added FeedsGroup class, abstraction to parse and produce list of feeds
diff --git a/doc/reference/libgrss-sections.txt b/doc/reference/libgrss-sections.txt
index 357d0fc..c0a4bb2 100644
--- a/doc/reference/libgrss-sections.txt
+++ b/doc/reference/libgrss-sections.txt
@@ -128,6 +128,7 @@ feed_channel_get_update_time
feed_channel_set_update_interval
feed_channel_get_update_interval
feed_channel_fetch
+feed_channel_fetch_async
</SECTION>
<SECTION>
diff --git a/src/feed-channel.c b/src/feed-channel.c
index e80cd23..3ad8dbc 100644
--- a/src/feed-channel.c
+++ b/src/feed-channel.c
@@ -630,6 +630,42 @@ feed_channel_get_update_interval (FeedChannel *channel)
return channel->priv->update_interval;
}
+static gboolean
+quick_and_dirty_parse (FeedChannel *channel, SoupMessage *msg)
+{
+ GList *items;
+ GList *iter;
+ xmlDocPtr doc;
+ FeedParser *parser;
+
+ /*
+ TODO This function is quite inefficient because it parses the
+ whole feed with a FeedParser and then discards the obtained
+ FeedItems. Perhaps a more targeted function in
+ FeedParser would help...
+ */
+
+ doc = content_to_xml (msg->response_body->data, msg->response_body->length);
+
+ if (doc != NULL) {
+ parser = feed_parser_new ();
+ items = feed_parser_parse (parser, channel, doc, NULL);
+
+ if (items != NULL) {
+ for (iter = items; iter; iter = g_list_next (iter))
+ g_object_unref (iter->data);
+ g_list_free (items);
+ }
+
+ g_object_unref (parser);
+ xmlFreeDoc (doc);
+ return TRUE;
+ }
+ else {
+ return FALSE;
+ }
+}
+
/**
* feed_channel_fetch:
* @channel: a #FeedChannel
@@ -647,46 +683,65 @@ feed_channel_fetch (FeedChannel *channel)
{
gboolean ret;
guint status;
- GList *items;
- GList *iter;
- xmlDocPtr doc;
SoupMessage *msg;
SoupSession *session;
- FeedParser *parser;
- /*
- TODO This function is quite inefficent because parses all
- the feed with a FeedParser and them waste obtained
- FeedItems. Perhaps a more aimed function in
- FeedParser would help...
- */
-
- ret = FALSE;
session = soup_session_sync_new ();
msg = soup_message_new ("GET", feed_channel_get_source (channel));
status = soup_session_send_message (session, msg);
if (status >= 200 && status <= 299) {
- doc = content_to_xml (msg->response_body->data, msg->response_body->length);
-
- if (doc != NULL) {
- parser = feed_parser_new ();
- items = feed_parser_parse (parser, channel, doc, NULL);
-
- for (iter = items; iter; iter = g_list_next (iter))
- g_object_unref (iter->data);
- g_list_free (items);
-
- g_object_unref (parser);
- xmlFreeDoc (doc);
- ret = TRUE;
- }
+ ret = quick_and_dirty_parse (channel, msg);
}
else {
g_warning ("Unable to fetch feed from %s: %s", feed_channel_get_source (channel), soup_status_get_phrase (status));
+ ret = FALSE;
}
g_object_unref (session);
g_object_unref (msg);
return ret;
}
+
+static void
+feed_downloaded (SoupSession *session, SoupMessage *msg, gpointer user_data) {
+ guint status;
+ GSimpleAsyncResult *result;
+ FeedChannel *channel;
+
+ result = user_data;
+ channel = FEED_CHANNEL (g_async_result_get_source_object (G_ASYNC_RESULT (result)));
+ g_object_get (msg, "status-code", &status, NULL);
+
+ if (status >= 200 && status <= 299) {
+ quick_and_dirty_parse (channel, msg);
+ }
+ else {
+ g_warning ("Unable to download from %s", feed_channel_get_source (channel));
+ }
+
+ g_simple_async_result_complete_in_idle (result);
+ g_object_unref (result);
+}
+
+/**
+ * feed_channel_fetch_async:
+ * @channel: a #FeedChannel
+ * @callback: function to invoke at the end of the download
+ * @user_data: data passed to the callback
+ *
+ * Similar to feed_channel_fetch(), but asynchronous
+ */
+void
+feed_channel_fetch_async (FeedChannel *channel, GAsyncReadyCallback callback, gpointer user_data)
+{
+ GSimpleAsyncResult *result;
+ SoupMessage *msg;
+ SoupSession *session;
+
+ result = g_simple_async_result_new (G_OBJECT (channel), callback, user_data, feed_channel_fetch_async);
+
+ session = soup_session_async_new ();
+ msg = soup_message_new ("GET", feed_channel_get_source (channel));
+ soup_session_queue_message (session, msg, feed_downloaded, result);
+}
diff --git a/src/feed-channel.h b/src/feed-channel.h
index 72bb461..a7bd36f 100644
--- a/src/feed-channel.h
+++ b/src/feed-channel.h
@@ -84,5 +84,6 @@ void feed_channel_set_update_interval (FeedChannel *channel, int minutes);
int feed_channel_get_update_interval (FeedChannel *channel);
gboolean feed_channel_fetch (FeedChannel *channel);
+void feed_channel_fetch_async (FeedChannel *channel, GAsyncReadyCallback callback, gpointer user_data);
#endif /* __FEED_CHANNEL_H__ */
diff --git a/src/feeds-group.c b/src/feeds-group.c
index 06be717..ea7a515 100644
--- a/src/feeds-group.c
+++ b/src/feeds-group.c
@@ -126,6 +126,7 @@ retrieve_group_handler (FeedsGroup *group, xmlDocPtr doc, xmlNodePtr cur)
iter = g_slist_next (iter);
}
+ g_warning ("No suitable parser has been found.");
return NULL;
}
[Date Prev][Date Next] [Thread Prev][Thread Next] [Thread Index] [Date Index] [Author Index]