[geary/wip/778276-better-flag-updates] Convert the account synchronizer's work into account operations.



commit 22f20e163fb5d798ac723597a91ee82fb11a137a
Author: Michael James Gratton <mike vee net>
Date:   Wed Dec 20 21:01:34 2017 +1030

    Convert the account synchronizer's work into account operations.
    
    * src/engine/imap-engine/imap-engine-account-synchronizer.vala: Replace
      the account synchronizer's queue and operations with two account
  operations, one for simply refreshing folders and one for extending the
      vector back to the prefetch epoch.

 .../imap-engine-account-synchronizer.vala          |  660 ++++++++------------
 .../imap-engine/imap-engine-generic-account.vala   |    3 +-
 .../imap-engine/imap-engine-minimal-folder.vala    |   15 +-
 3 files changed, 255 insertions(+), 423 deletions(-)
---
diff --git a/src/engine/imap-engine/imap-engine-account-synchronizer.vala 
b/src/engine/imap-engine/imap-engine-account-synchronizer.vala
index 274b145..a14fded 100644
--- a/src/engine/imap-engine/imap-engine-account-synchronizer.vala
+++ b/src/engine/imap-engine/imap-engine-account-synchronizer.vala
@@ -1,491 +1,323 @@
-/* Copyright 2016 Software Freedom Conservancy Inc.
+/*
+ * Copyright 2016 Software Freedom Conservancy Inc.
+ * Copyright 2017 Michael Gratton <mike vee net>
  *
  * This software is licensed under the GNU Lesser General Public License
  * (version 2.1 or later).  See the COPYING file in this distribution.
  */
 
 private class Geary.ImapEngine.AccountSynchronizer : Geary.BaseObject {
-    private const int FETCH_DATE_RECEIVED_CHUNK_COUNT = 25;
-    private const int SYNC_DELAY_SEC = 10;
-    private const int RETRY_SYNC_DELAY_SEC = 60;
+
 
     private weak GenericAccount account { get; private set; }
-    private weak Imap.Account remote { get; private set; }
 
-    private Nonblocking.Queue<MinimalFolder> bg_queue =
-        new Nonblocking.Queue<MinimalFolder>.priority(bg_queue_comparator);
-    private Gee.HashSet<MinimalFolder> made_available =
-        new Gee.HashSet<MinimalFolder>();
-    private Gee.HashSet<FolderPath> unavailable_paths =
-        new Gee.HashSet<FolderPath>();
-    private MinimalFolder? current_folder = null;
-    private Cancellable? bg_cancellable = null;
-    private DateTime max_epoch = new DateTime(new TimeZone.local(), 2000, 1, 1, 0, 0, 0.0);
+    private TimeoutManager prefetch_timer;
+    private DateTime max_epoch = new DateTime(
+        new TimeZone.local(), 2000, 1, 1, 0, 0, 0.0
+    );
 
 
-    public AccountSynchronizer(GenericAccount account, Imap.Account remote) {
+    public AccountSynchronizer(GenericAccount account) {
         this.account = account;
-        this.remote = remote;
-
-        // don't allow duplicates because it's possible for a Folder to change several times
-        // before finally opened and synchronized, which we only want to do once
-        this.bg_queue.allow_duplicates = false;
-        this.bg_queue.requeue_duplicate = false;
+        this.prefetch_timer = new TimeoutManager.seconds(
+            10, do_prefetch_changed
+        );
 
         this.account.information.notify["prefetch-period-days"].connect(on_account_prefetch_changed);
-        this.account.folders_available_unavailable.connect(on_folders_available_unavailable);
+        this.account.folders_available_unavailable.connect(on_folders_updated);
         this.account.folders_contents_altered.connect(on_folders_contents_altered);
-        this.remote.ready.connect(on_account_ready);
-    }
-
-    public void stop() {
-        Cancellable? cancellable = this.bg_cancellable;
-        if (cancellable != null) {
-            debug("%s: Stopping...", this.account.to_string());
-            cancellable.cancel();
-
-            this.bg_queue.clear();
-            this.made_available.clear();
-            this.unavailable_paths.clear();
-            this.current_folder = null;
-        }
-    }
-
-    private void on_account_prefetch_changed() {
-        try {
-            // treat as an availability check (i.e. as if the account had just opened) because
-            // just because this value has changed doesn't mean the contents in the folders
-            // have changed
-            if (this.account.is_open()) {
-                delayed_send_all(account.list_folders(), true, SYNC_DELAY_SEC);
-            }
-        } catch (Error err) {
-            debug("Unable to schedule re-sync for %s due to prefetch time changing: %s",
-                account.to_string(), err.message);
-        }
-    }
-    
-    private void on_folders_available_unavailable(Gee.Collection<Folder>? available,
-                                                  Gee.Collection<Folder>? unavailable) {
-        if (available != null) {
-            foreach (Folder folder in available)
-                unavailable_paths.remove(folder.path);
-            
-            delayed_send_all(available, true, SYNC_DELAY_SEC);
-        }
-        
-        if (unavailable != null) {
-            foreach (Folder folder in unavailable)
-                unavailable_paths.add(folder.path);
-            
-            revoke_all(unavailable);
-        }
-    }
-    
-    private void on_folders_contents_altered(Gee.Collection<Folder> altered) {
-        delayed_send_all(altered, false, SYNC_DELAY_SEC);
     }
 
-    private void delayed_send_all(Gee.Collection<Folder> folders, bool reason_available, int sec) {
-        Timeout.add_seconds(sec, () => {
-            // remove any unavailable folders
-            Gee.ArrayList<Folder> trimmed_folders = new Gee.ArrayList<Folder>();
-            foreach (Folder folder in folders) {
-                if (!unavailable_paths.contains(folder.path))
-                    trimmed_folders.add(folder);
-            }
-            
-            send_all(trimmed_folders, reason_available);
-            
-            return false;
-        });
-    }
-    
-    private void send_all(Gee.Collection<Folder> folders, bool reason_available) {
+    private void send_all(Gee.Collection<Folder> folders, bool became_available) {
         foreach (Folder folder in folders) {
             MinimalFolder? imap_folder = folder as MinimalFolder;
-            
             // only deal with ImapEngine.MinimalFolder
             if (imap_folder == null)
                 continue;
-            
+
             // if considering folder not because it's available (i.e. because its contents changed),
             // and the folder is open, don't process it; MinimalFolder will take care of changes as
             // they occur, in order to remain synchronized
-            if (!reason_available &&
+            if (!became_available &&
                 imap_folder.get_open_state() != Folder.OpenState.CLOSED) {
                 continue;
             }
 
-            // don't requeue the currently processing folder
-            if (imap_folder != current_folder)
-                bg_queue.send(imap_folder);
-            
-            // If adding because now available, make sure it's flagged as such, since there's an
-            // additional check for available folders ... if not, remove from the map so it's
-            // not treated as such, in case both of these come in back-to-back
-            if (reason_available && imap_folder != current_folder)
-                made_available.add(imap_folder);
-            else
-                made_available.remove(imap_folder);
+            AccountOperation op = became_available
+                ? new CheckFolderSync(this.account, imap_folder, this.max_epoch)
+                : new RefreshFolderSync(this.account, imap_folder);
+
+            try {
+                this.account.queue_operation(op);
+            } catch (Error err) {
+                debug("Failed to queue sync operation: %s", err.message);
+            }
         }
     }
-    
-    private void revoke_all(Gee.Collection<Folder> folders) {
-        foreach (Folder folder in folders) {
-            MinimalFolder? generic_folder = folder as MinimalFolder;
-            if (generic_folder != null) {
-                bg_queue.revoke(generic_folder);
-                made_available.remove(generic_folder);
+
+    private void do_prefetch_changed() {
+        // treat as an availability check (i.e. as if the account had
+        // just opened), since a change in this value alone does not
+        // mean the contents of the folders have changed
+        if (this.account.is_open()) {
+            try {
+                send_all(this.account.list_folders(), true);
+            } catch (Error err) {
+                debug("Failed to list account folders for sync: %s", err.message);
             }
         }
     }
-    
-    // This is used to ensure that certain special folders get prioritized over others, so folders
-    // important to the user (i.e. Inbox) go first while less-used folders (Spam) are fetched last
-    private static int bg_queue_comparator(MinimalFolder a, MinimalFolder b) {
-        if (a == b)
-            return 0;
-        
-        int cmp = score_folder(a) - score_folder(b);
-        if (cmp != 0)
-            return cmp;
-        
-        // sort by path to stabilize the sort
-        return a.path.compare_to(b.path);
+
+    private void on_account_prefetch_changed() {
+        this.prefetch_timer.start();
     }
-    
-    // Lower the score, the higher the importance.
-    //
-    // Some explanation is due here.  It may seem odd to place TRASH, SENT, and DRAFTS so high, but
-    // there's a method to the madness.  In particular, because Geary can produce a lot of drafts
-    // during composition, it's important to synchronize with Trash so discarded drafts don't wind
-    // up included in conversations until, eventually, the Trash is synchronized.  (Recall that
-    // Spam and Trash are blacklisted in conversations and searching.)  Since Drafts is open while
-    // writing them, it's not vital to keep it absolutely high, but Trash is usually not open,
-    // so it should be.
-    //
-    // All Mail is important, but synchronizing with it can be hard on the system because of the
-    // sheer amount of messages, and so it's placed lower to put it off until the more active
-    // folders are finished.
-    private static int score_folder(Folder a) {
-        switch (a.special_folder_type) {
-            case SpecialFolderType.INBOX:
-                return -70;
-            
-            case SpecialFolderType.TRASH:
-                return -60;
-            
-            case SpecialFolderType.SENT:
-                return -50;
-            
-            case SpecialFolderType.DRAFTS:
-                return -40;
-            
-            case SpecialFolderType.FLAGGED:
-                return -30;
-            
-            case SpecialFolderType.IMPORTANT:
-                return -20;
-            
-            case SpecialFolderType.ALL_MAIL:
-            case SpecialFolderType.ARCHIVE:
-                return -10;
-            
-            case SpecialFolderType.SPAM:
-                return 10;
-            
-            default:
-                return 0;
+
+    private void on_folders_updated(Gee.Collection<Folder>? available,
+                                    Gee.Collection<Folder>? unavailable) {
+        if (available != null) {
+            send_all(available, true);
         }
     }
-    
-    private async void process_queue_async() {
-        if (this.bg_cancellable != null) {
-            return;
-        }
-        Cancellable cancellable = this.bg_cancellable = new Cancellable();
 
-        debug("%s: Starting background sync", this.account.to_string());
+    private void on_folders_contents_altered(Gee.Collection<Folder> altered) {
+        send_all(altered, false);
+    }
 
-        while (!cancellable.is_cancelled()) {
-            MinimalFolder folder;
-            try {
-                folder = yield bg_queue.receive(bg_cancellable);
-            } catch (Error err) {
-                if (!(err is IOError.CANCELLED))
-                    debug("Failed to receive next folder for background sync: %s", err.message);
-                break;
-            }
+}
 
-            // generate the current epoch for synchronization (could cache this value, obviously, but
-            // doesn't seem like this biggest win in this class)
-            DateTime epoch;
-            if (account.information.prefetch_period_days >= 0) {
-                epoch = new DateTime.now_local();
-                epoch = epoch.add_days(0 - account.information.prefetch_period_days);
-            } else {
-                epoch = max_epoch;
-            }
+/**
+ * Synchronises a folder after its contents have changed.
+ *
+ * This synchronisation process simply opens the remote folder, waits
+ * for it to finish opening for normalisation and pre-fetching to
+ * complete, then closes it again.
+ */
+private class Geary.ImapEngine.RefreshFolderSync : FolderOperation {
 
-            bool availability_check = false;
-            try {
-                // mark as current folder to prevent requeues while processing
-                this.current_folder = folder;
-                availability_check = this.made_available.remove(folder);
-                yield process_folder_async(folder, availability_check, epoch, cancellable);
-            } catch (Error err) {
-                // retry the folder later
-                delayed_send_all(
-                    iterate<Folder>(folder).to_array_list(),
-                    availability_check,
-                    RETRY_SYNC_DELAY_SEC
-                );
-                if (!(err is IOError.CANCELLED)) {
-                    debug("%s: Error synchronising %s: %s",
-                          this.account.to_string(), folder.to_string(), err.message);
+
+    internal RefreshFolderSync(GenericAccount account,
+                               MinimalFolder folder) {
+        base(account, folder);
+    }
+
+    public override async void execute(Cancellable cancellable)
+        throws Error {
+        bool opened = false;
+        try {
+            yield this.folder.open_async(Folder.OpenFlags.FAST_OPEN, cancellable);
+            opened = true;
+            yield this.folder.wait_for_open_async(cancellable);
+            yield sync_folder(cancellable);
+        } finally {
+            if (opened) {
+                try {
+                    // don't pass in the Cancellable; really need this
+                    // to complete in all cases
+                    yield this.folder.close_async();
+                } catch (Error err) {
+                    debug(
+                        "%s: Error closing folder %s: %s",
+                        this.account.to_string(),
+                        this.folder.to_string(),
+                        err.message
+                    );
                 }
-                break;
-            } finally {
-                this.current_folder = null;
             }
         }
-
-        this.bg_cancellable = null;
     }
 
-    // Returns false if IOError.CANCELLED received
-    private async void process_folder_async(MinimalFolder folder,
-                                            bool availability_check,
-                                            DateTime epoch,
-                                            Cancellable cancellable)
+    protected virtual async void sync_folder(Cancellable cancellable)
         throws Error {
-        Logging.debug(
-            Logging.Flag.PERIODIC,
-            "Background sync'ing %s to %s",
-            folder.to_string(),
-            epoch.to_string()
-        );
+        yield wait_for_prefetcher(cancellable);
+    }
 
-        // If we aren't checking the folder because it became
-        // available, then it has changed and we need to check it.
-        // Otherwise compare the oldest mail in the local store and
-        // see if it is before the epoch; if so, no need to
-        // synchronize simply because this Folder is available; wait
-        // for its contents to change instead.
-        //
-        // Note we can't compare the local and remote folder counts
-        // here, since the folder may not have opened yet to determine
-        // what the actual remote count is, which is particularly
-        // problematic when an existing folder is seen for the first
-        // time, e.g. when the account was just added.
-
-        DateTime? oldest_local = null;
-        Geary.EmailIdentifier? oldest_local_id = null;
-        bool do_sync = true;
-
-        if (!availability_check) {
-            // Folder already available, so it must have changed
+    protected async void wait_for_prefetcher(Cancellable cancellable)
+        throws Error {
+        MinimalFolder minimal = (MinimalFolder) this.folder;
+        try {
+            yield minimal.email_prefetcher.active_sem.wait_async(cancellable);
+        } catch (Error err) {
             Logging.debug(
                 Logging.Flag.PERIODIC,
-                "Folder %s changed, synchronizing...",
-                folder.to_string()
+                "Error waiting for email prefetcher to complete %s: %s",
+                folder.to_string(),
+                err.message
             );
-        } else {
-            // get oldest local email and its time, as well as number
-            // of messages in local store
-            Gee.List<Geary.Email>? list =yield folder.local_folder.list_email_by_id_async(
-                null,
-                1,
-                Email.Field.PROPERTIES,
-                ImapDB.Folder.ListFlags.NONE | ImapDB.Folder.ListFlags.OLDEST_TO_NEWEST,
-                cancellable
+        }
+    }
+
+}
+
+/**
+ * Synchronises a folder after first checking if it needs to be sync'ed.
+ *
+ * This synchronisation process performs the same work as its base
+ * class, but also ensures enough mail has been fetched to satisfy the
+ * account's prefetch period, by checking the earliest mail in the
+ * folder and if later than the maximum prefetch epoch, expands the
+ * folder's vector until it does.
+ */
+private class Geary.ImapEngine.CheckFolderSync : RefreshFolderSync {
+
+
+    private DateTime sync_max_epoch;
+
+
+    internal CheckFolderSync(GenericAccount account,
+                             MinimalFolder folder,
+                             DateTime sync_max_epoch) {
+        base(account, folder);
+        this.sync_max_epoch = sync_max_epoch;
+    }
+
+    protected override async void sync_folder(Cancellable cancellable)
+        throws Error {
+        // Determine the earliest date we should be synchronising back to
+        DateTime prefetch_max_epoch;
+        if (this.account.information.prefetch_period_days >= 0) {
+            prefetch_max_epoch = new DateTime.now_local();
+            prefetch_max_epoch = prefetch_max_epoch.add_days(
+                0 - account.information.prefetch_period_days
             );
-            if (list != null && list.size > 0) {
-                oldest_local = list[0].properties.date_received;
-                oldest_local_id = list[0].id;
-            }
+        } else {
+            prefetch_max_epoch = this.sync_max_epoch;
+        }
 
-            if (oldest_local == null) {
-                // No oldest message found, so we haven't seen the folder
-                // before or it has no messages. Either way we need to
-                // open it to check, so sync it.
-                Logging.debug(
-                    Logging.Flag.PERIODIC,
-                    "No oldest message found for %s, synchronizing...",
-                    folder.to_string()
-                );
-            } else if (oldest_local.compare(epoch) < 0) {
-                // Oldest local email before epoch, don't sync from network
-                do_sync = false;
-                Logging.debug(
-                    Logging.Flag.PERIODIC,
-                    "Oldest local message is older than the epoch for %s",
-                    folder.to_string()
-                );
-            }
+        // get oldest local email and its time, as well as number
+        // of messages in local store
+        ImapDB.Folder local_folder = ((MinimalFolder) this.folder).local_folder;
+        Gee.List<Geary.Email>? list = yield local_folder.list_email_by_id_async(
+            null,
+            1,
+            Email.Field.PROPERTIES,
+            ImapDB.Folder.ListFlags.NONE | ImapDB.Folder.ListFlags.OLDEST_TO_NEWEST,
+            cancellable
+        );
+
+        Geary.Email? current_oldest = null;
+        if (list != null && list.size > 0) {
+            current_oldest = list[0];
         }
 
-        if (do_sync) {
-            bool opened = false;
-            try {
-                yield folder.open_async(Folder.OpenFlags.FAST_OPEN, cancellable);
-                opened = true;
-                yield sync_folder_async(folder, epoch, oldest_local, oldest_local_id, cancellable);
-            } finally {
-                if (opened) {
-                    try {
-                        // don't pass Cancellable; really need this to complete in all cases
-                        yield folder.close_async();
-                    } catch (Error err) {
-                        debug("%s: Error closing folder %s: %s",
-                              this.account.to_string(), folder.to_string(), err.message);
-                    }
-                }
-            }
+        DateTime? oldest_date = (current_oldest != null)
+            ? current_oldest.properties.date_received : null;
+        if (oldest_date == null) {
+            oldest_date = new DateTime.now_local();
         }
-        Logging.debug(
-            Logging.Flag.PERIODIC, "Background sync of %s completed",
-            folder.to_string()
-        );
-    }
 
-    private async void sync_folder_async(MinimalFolder folder,
-                                         DateTime epoch,
-                                         DateTime? oldest_local,
-                                         Geary.EmailIdentifier? oldest_local_id,
-                                         Cancellable cancellable)
-        throws Error {
+        DateTime? next_epoch = oldest_date;
+        while (next_epoch.compare(prefetch_max_epoch) > 0) {
+            int local_count = yield local_folder.get_email_count_async(
+                ImapDB.Folder.ListFlags.NONE, cancellable
+            );
 
-        // wait for the folder to be fully opened to be sure we have all the most current
-        // information
-        yield folder.wait_for_open_async(cancellable);
-        
-        // only perform vector expansion if oldest isn't old enough
-        if (oldest_local == null || oldest_local.compare(epoch) > 0) {
-            // go back three months at a time to the epoch, performing a little vector expansion at a
-            // time rather than all at once (which will stall the replay queue)
-            DateTime current_epoch = (oldest_local != null) ? oldest_local : new DateTime.now_local();
-            do {
-                // look for complete synchronization of UIDs (i.e. complete vector normalization)
-                // no need to keep searching once this happens
-                int local_count = yield 
folder.local_folder.get_email_count_async(ImapDB.Folder.ListFlags.NONE,
-                    cancellable);
-                int remote_count = folder.properties.email_total;
-                if (local_count >= remote_count) {
-                    Logging.debug(
-                        Logging.Flag.PERIODIC,
-                        "Final vector normalization for %s: %d/%d emails",
-                        folder.to_string(),
-                        local_count,
-                        remote_count
-                    );
-                    break;
-                }
-                
-                current_epoch = current_epoch.add_months(-3);
-                
-                // if past max_epoch, then just pull in everything and be done with it
-                if (current_epoch.compare(max_epoch) < 0) {
-                    Logging.debug(
-                        Logging.Flag.PERIODIC,
-                        "Synchronization reached max epoch of %s, fetching all mail from %s (already got %d 
of %d emails)",
-                        max_epoch.to_string(),
-                        folder.to_string(),
-                        local_count,
-                        remote_count
-                    );
+            next_epoch = next_epoch.add_months(-3);
+            if (next_epoch.compare(prefetch_max_epoch) < 0) {
+                next_epoch = prefetch_max_epoch;
+            }
 
-                    // Per the contract for list_email_by_id_async, we
-                    // need to specify int.MAX count and ensure that
-                    // ListFlags.OLDEST_TO_NEWEST is *not* specified
-                    // to get all messages listed.
-                    //
-                    // XXX This is expensive, but should only usually
-                    // happen once per folder - at the end of a full
-                    // sync.
-                    yield folder.list_email_by_id_async(
-                        null,
-                        int.MAX,
-                        Geary.Email.Field.NONE,
-                        Geary.Folder.ListFlags.NONE,
-                        cancellable
-                    );
-                } else {
-                    // don't go past proscribed epoch
-                    if (current_epoch.compare(epoch) < 0)
-                        current_epoch = epoch;
-
-                    Logging.debug(
-                        Logging.Flag.PERIODIC,
-                        "Synchronizing %s to %s (already got %d of %d emails)",
-                        folder.to_string(),
-                        current_epoch.to_string(),
-                        local_count,
-                        remote_count
+            debug("%s *** syncing to: %s", this.account.to_string(), next_epoch.to_string());
+
+            if (local_count < this.folder.properties.email_total &&
+                next_epoch.compare(prefetch_max_epoch) >= 0) {
+                if (next_epoch.compare(this.sync_max_epoch) > 0) {
+                    current_oldest = yield expand_vector(
+                        next_epoch, current_oldest, cancellable
                     );
-                    Geary.EmailIdentifier? earliest_span_id = yield 
folder.find_earliest_email_async(current_epoch,
-                        oldest_local_id, cancellable);
-                    if (earliest_span_id == null && current_epoch.compare(epoch) <= 0) {
-                        Logging.debug(
-                            Logging.Flag.PERIODIC,
-                            "Unable to locate epoch messages on remote folder %s%s, fetching one past 
oldest...",
-                            folder.to_string(),
-                            (oldest_local_id != null) ? " earlier than oldest local" : ""
+                    if (current_oldest == null &&
+                        next_epoch.equal(prefetch_max_epoch)) {
+                        yield expand_to_previous(
+                            current_oldest, cancellable
                         );
-
-                        // if there's nothing between the oldest local and the epoch, that means the
-                        // mail just prior to our local oldest is oldest than the epoch; rather than
-                        // continually thrashing looking for something that's just out of reach, add it
-                        // to the folder and be done with it ... note that this even works if oldest_local_id
-                        // is null, as that means the local folder is empty and so we should at least
-                        // pull the first one to get a marker of age
-                        yield folder.list_email_by_id_async(oldest_local_id, 1, Geary.Email.Field.NONE,
-                            Geary.Folder.ListFlags.NONE, cancellable);
-                    } else if (earliest_span_id != null) {
-                        // use earliest email from that span for the next round
-                        oldest_local_id = earliest_span_id;
+                        // Exit next time around
+                        next_epoch = prefetch_max_epoch.add_days(-1);
                     }
+                } else {
+                    yield expand_complete_vector(cancellable);
+                    // Exit next time around
+                    next_epoch = prefetch_max_epoch.add_days(-1);
                 }
-                
-                yield Scheduler.sleep_ms_async(200);
-            } while (current_epoch.compare(epoch) > 0);
-        } else {
-            Logging.debug(
-                Logging.Flag.PERIODIC,
-                "No expansion necessary for %s, oldest local (%s) is before epoch (%s)",
-                folder.to_string(),
-                oldest_local.to_string(),
-                epoch.to_string()
-            );
+            } else {
+                // Exit next time around
+                next_epoch = prefetch_max_epoch.add_days(-1);
+            }
+
+            // let the prefetcher catch up
+            yield wait_for_prefetcher(cancellable);
         }
+    }
 
-        // always give email prefetcher time to finish its work
+    private async Geary.Email? expand_vector(DateTime next_epoch,
+                                             Geary.Email? current_oldest,
+                                             Cancellable cancellable)
+        throws Error {
+        // Expand the vector up until the given epoch
         Logging.debug(
             Logging.Flag.PERIODIC,
-            "Waiting for email prefetcher to complete %s...",
-            folder.to_string()
+            "Synchronizing %s:%s to %s",
+            this.account.to_string(),
+            this.folder.to_string(),
+            next_epoch.to_string()
         );
-        try {
-            yield folder.email_prefetcher.active_sem.wait_async(cancellable);
-        } catch (Error err) {
-            Logging.debug(
-                Logging.Flag.PERIODIC,
-                "Error waiting for email prefetcher to complete %s: %s",
-                folder.to_string(),
-                err.message
-            );
-        }
+        return yield ((MinimalFolder) this.folder).find_earliest_email_async(
+            next_epoch,
+            (current_oldest != null) ? current_oldest.id : null,
+            cancellable
+        );
+    }
 
+    private async void expand_to_previous(Geary.Email? current_oldest,
+                                          Cancellable cancellable)
+        throws Error {
+        // there's nothing between the oldest local and the epoch,
+        // which means the mail just prior to our local oldest is
+        // older than the epoch; rather than continually thrashing
+        // looking for something that's just out of reach, add it to
+        // the folder and be done with it ... note that this even
+        // works if id is null, as that means the local folder is
+        // empty and so we should at least pull the first one to get a
+        // marker of age
+        Geary.EmailIdentifier? id =
+            (current_oldest != null) ? current_oldest.id : null;
         Logging.debug(
             Logging.Flag.PERIODIC,
-            "Done background sync'ing %s",
-            folder.to_string()
+            "Unable to locate epoch messages on remote folder %s:%s%s, fetching one past oldest...",
+            this.account.to_string(),
+            this.folder.to_string(),
+            (id != null) ? " earlier than oldest local" : ""
+        );
+        yield this.folder.list_email_by_id_async(
+            id, 1,
+            Geary.Email.Field.NONE,
+            Geary.Folder.ListFlags.NONE, cancellable
         );
     }
 
-    private void on_account_ready() {
-        this.process_queue_async.begin();
+    private async void expand_complete_vector(Cancellable cancellable)
+        throws Error {
+        // past max_epoch, so just pull in everything and be done with it
+        Logging.debug(
+            Logging.Flag.PERIODIC,
+            "Synchronization reached max epoch of %s, fetching all mail from %s:%s",
+            this.sync_max_epoch.to_string(),
+            this.account.to_string(),
+            this.folder.to_string()
+        );
+
+        // Per the contract for list_email_by_id_async, we need to
+        // specify int.MAX count and ensure that
+        // ListFlags.OLDEST_TO_NEWEST is *not* specified to get all
+        // messages listed.
+        //
+        // XXX This is expensive, but should only usually happen once
+        // per folder - at the end of a full sync.
+        yield this.folder.list_email_by_id_async(
+            null,
+            int.MAX,
+            Geary.Email.Field.NONE,
+            Geary.Folder.ListFlags.NONE,
+            cancellable
+        );
     }
 
 }
diff --git a/src/engine/imap-engine/imap-engine-generic-account.vala 
b/src/engine/imap-engine/imap-engine-generic-account.vala
index ccac9d5..5d0a872 100644
--- a/src/engine/imap-engine/imap-engine-generic-account.vala
+++ b/src/engine/imap-engine/imap-engine-generic-account.vala
@@ -69,7 +69,7 @@ private abstract class Geary.ImapEngine.GenericAccount : Geary.Account {
             search_path = new ImapDB.SearchFolderRoot();
         }
 
-        this.sync = new AccountSynchronizer(this, this.remote);
+        this.sync = new AccountSynchronizer(this);
 
         compile_special_search_names();
     }
@@ -226,7 +226,6 @@ private abstract class Geary.ImapEngine.GenericAccount : Geary.Account {
         // Halt internal tasks early so they stop using local and
         // remote connections.
         this.processor.stop();
-        this.sync.stop();
 
         this.refresh_folder_timer.reset();
 
diff --git a/src/engine/imap-engine/imap-engine-minimal-folder.vala 
b/src/engine/imap-engine/imap-engine-minimal-folder.vala
index 1086375..53ddc62 100644
--- a/src/engine/imap-engine/imap-engine-minimal-folder.vala
+++ b/src/engine/imap-engine/imap-engine-minimal-folder.vala
@@ -1341,7 +1341,7 @@ private class Geary.ImapEngine.MinimalFolder : Geary.Folder, Geary.FolderSupport
     
     // TODO: A proper public search mechanism; note that this always round-trips to the remote,
     // doesn't go through the replay queue, and doesn't deal with messages marked for deletion
-    internal async Geary.EmailIdentifier? find_earliest_email_async(DateTime datetime,
+    internal async Geary.Email? find_earliest_email_async(DateTime datetime,
         Geary.EmailIdentifier? before_id, Cancellable? cancellable) throws Error {
         check_open("find_earliest_email_async");
         if (before_id != null)
@@ -1372,19 +1372,20 @@ private class Geary.ImapEngine.MinimalFolder : Geary.Folder, Geary.FolderSupport
         replay_queue.schedule(op);
         
         yield op.wait_for_ready_async(cancellable);
-        
+
         // find earliest ID; because all Email comes from Folder, UID should always be present
+        Geary.Email? earliest = null;
         ImapDB.EmailIdentifier? earliest_id = null;
         foreach (Geary.Email email in op.accumulator) {
             ImapDB.EmailIdentifier email_id = (ImapDB.EmailIdentifier) email.id;
-            
-            if (earliest_id == null || email_id.uid.compare_to(earliest_id.uid) < 0)
+            if (earliest_id == null || email_id.uid.compare_to(earliest_id.uid) < 0) {
+                earliest = email;
                 earliest_id = email_id;
+            }
         }
-
-        return earliest_id;
+        return earliest;
     }
-    
+
     protected async Geary.EmailIdentifier? create_email_async(RFC822.Message rfc822,
         Geary.EmailFlags? flags, DateTime? date_received, Geary.EmailIdentifier? id,
         Cancellable? cancellable = null) throws Error {


[Date Prev][Date Next]   [Thread Prev][Thread Next]   [Thread Index] [Date Index] [Author Index]