[evolution-data-server] [IMAPx] Fix a concurrent message download issue



commit ca8c1c61ba7dbf1a20c496808da5a0e9ed6cbc89
Author: Milan Crha <mcrha redhat com>
Date:   Wed Oct 24 10:22:06 2018 +0200

    [IMAPx] Fix a concurrent message download issue
    
    When there are two requests to download one message at the same time,
    the latter waits for the former to finish and then reads the message
    from the local cache. The problem is that the latter can use the same
    stream as the former, but that stream is left positioned at its end,
    thus the latter attempt doesn't read anything and produces a basically
    empty message instead of the proper content.
    
    This could sometimes be observed when viewing the message source of
    a message which had not been downloaded yet.
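
Below is a minimal standalone sketch of the failure mode at the plain GIO
level, using a GMemoryInputStream in place of the CamelStream-wrapped cache
stream; the file name, data and setup are illustrative only, not part of the
patch. The first consumer drains the shared seekable stream and leaves it at
the end, so a second read returns nothing until the stream is rewound.

/*
 * A sketch of the bug at the GIO level; not from the patch itself.
 * Build (assuming GLib is installed):
 *   gcc rewind-demo.c $(pkg-config --cflags --libs gio-2.0)
 */
#include <gio/gio.h>

int
main (void)
{
	const gchar *data = "From: user@example.com\r\n\r\nHello\r\n";
	GInputStream *stream;
	gchar buf[64];
	gssize n;

	/* Stand-in for the cached message stream shared by two requests. */
	stream = g_memory_input_stream_new_from_data (data, -1, NULL);

	/* First consumer reads everything and leaves the position at the end. */
	while ((n = g_input_stream_read (stream, buf, sizeof (buf), NULL, NULL)) > 0)
		;

	/* Second consumer reads from the stale position: 0 bytes, i.e. an
	 * effectively empty message at the Camel level. */
	n = g_input_stream_read (stream, buf, sizeof (buf), NULL, NULL);
	g_print ("without rewind: %" G_GSSIZE_FORMAT " bytes\n", n);

	/* The fix: rewind before reading. */
	g_seekable_seek (G_SEEKABLE (stream), 0, G_SEEK_SET, NULL, NULL);
	n = g_input_stream_read (stream, buf, sizeof (buf), NULL, NULL);
	g_print ("after rewind:   %" G_GSSIZE_FORMAT " bytes\n", n);

	g_object_unref (stream);

	return 0;
}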

 src/camel/providers/imapx/camel-imapx-folder.c | 56 ++++++++++++++++----------
 1 file changed, 34 insertions(+), 22 deletions(-)
---
diff --git a/src/camel/providers/imapx/camel-imapx-folder.c b/src/camel/providers/imapx/camel-imapx-folder.c
index 6e4f9235c..95a5811ff 100644
--- a/src/camel/providers/imapx/camel-imapx-folder.c
+++ b/src/camel/providers/imapx/camel-imapx-folder.c
@@ -574,6 +574,38 @@ exit:
        return success;
 }
 
+static CamelMimeMessage *
+imapx_message_from_stream_sync (CamelIMAPXFolder *imapx_folder,
+                               CamelStream *stream,
+                               GCancellable *cancellable,
+                               GError **error)
+{
+       CamelMimeMessage *msg;
+
+       g_return_val_if_fail (CAMEL_IS_IMAPX_FOLDER (imapx_folder), NULL);
+
+       if (!stream)
+               return NULL;
+
+       msg = camel_mime_message_new ();
+
+       g_mutex_lock (&imapx_folder->stream_lock);
+
+       /* Make sure the stream is at the beginning. It can happen, when there are
+          two concurrent requests for a message, that they both use the same
+          underlying stream from the local cache (encapsulated in the CamelStream),
+          where one reads it completely and leaves it positioned at the end, thus
+          the second caller would read the stream from a wrong position. */
+       g_seekable_seek (G_SEEKABLE (stream), 0, G_SEEK_SET, cancellable, NULL);
+
+       if (!camel_data_wrapper_construct_from_stream_sync (CAMEL_DATA_WRAPPER (msg), stream, cancellable, error))
+               g_clear_object (&msg);
+
+       g_mutex_unlock (&imapx_folder->stream_lock);
+
+       return msg;
+}
+
 static CamelMimeMessage *
 imapx_get_message_cached (CamelFolder *folder,
                          const gchar *message_uid,
@@ -596,18 +628,8 @@ imapx_get_message_cached (CamelFolder *folder,
        }
 
        if (stream != NULL) {
-               gboolean success;
+               msg = imapx_message_from_stream_sync (imapx_folder, stream, cancellable, NULL);
 
-               msg = camel_mime_message_new ();
-
-               g_mutex_lock (&imapx_folder->stream_lock);
-               success = camel_data_wrapper_construct_from_stream_sync (
-                       CAMEL_DATA_WRAPPER (msg), stream, cancellable, NULL);
-               if (!success) {
-                       g_object_unref (msg);
-                       msg = NULL;
-               }
-               g_mutex_unlock (&imapx_folder->stream_lock);
                g_object_unref (stream);
        }
 
@@ -672,18 +694,8 @@ imapx_get_message_sync (CamelFolder *folder,
        }
 
        if (stream != NULL) {
-               gboolean success;
+               msg = imapx_message_from_stream_sync (imapx_folder, stream, cancellable, error);
 
-               msg = camel_mime_message_new ();
-
-               g_mutex_lock (&imapx_folder->stream_lock);
-               success = camel_data_wrapper_construct_from_stream_sync (
-                       CAMEL_DATA_WRAPPER (msg), stream, cancellable, error);
-               if (!success) {
-                       g_object_unref (msg);
-                       msg = NULL;
-               }
-               g_mutex_unlock (&imapx_folder->stream_lock);
                g_object_unref (stream);
        }
 

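The new helper does the seek and the read while holding
imapx_folder->stream_lock, so the rewind cannot race with another reader of
the same cache stream. Below is a sketch of that lock-then-rewind pattern
with two threads, again at the plain GIO level; reader_thread and stream_lock
are illustrative stand-ins, not names from the patch.

/*
 * A sketch of the lock-then-rewind pattern; reader_thread and
 * stream_lock are illustrative stand-ins, not patch code.
 * Build: gcc lock-demo.c $(pkg-config --cflags --libs gio-2.0)
 */
#include <gio/gio.h>

static GMutex stream_lock;

static gpointer
reader_thread (gpointer user_data)
{
	GInputStream *stream = user_data;
	gchar buf[64];
	gssize n, total = 0;

	g_mutex_lock (&stream_lock);

	/* Rewind under the lock, as imapx_message_from_stream_sync does,
	 * so another reader's leftover position cannot affect this read. */
	g_seekable_seek (G_SEEKABLE (stream), 0, G_SEEK_SET, NULL, NULL);

	while ((n = g_input_stream_read (stream, buf, sizeof (buf), NULL, NULL)) > 0)
		total += n;

	g_mutex_unlock (&stream_lock);

	g_print ("read %" G_GSSIZE_FORMAT " bytes\n", total);

	return NULL;
}

int
main (void)
{
	const gchar *data = "From: user@example.com\r\n\r\nHello\r\n";
	GInputStream *stream = g_memory_input_stream_new_from_data (data, -1, NULL);
	GThread *t1 = g_thread_new ("reader-1", reader_thread, stream);
	GThread *t2 = g_thread_new ("reader-2", reader_thread, stream);

	g_thread_join (t1);
	g_thread_join (t2);
	g_object_unref (stream);

	return 0;
}

Both threads report the full byte count, because each one rewinds under the
lock before reading; without the g_seekable_seek call, whichever thread
happened to run second would read 0 bytes, which is the empty-message symptom
the commit fixes.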
